                self.parse_inet_line(words, current_if, ips)
            elif words[0] == 'inet6':
                self.parse_inet6_line(words, current_if, ips)
            elif words[0] == 'tunnel':
                self.parse_tunnel_line(words, current_if, ips)
            else:
                self.parse_unknown_line(words, current_if, ips)
        return interfaces, ips

    def parse_interface_line(self, words):
        # First line of an ifconfig stanza: seed a fresh interface dict with
        # device name, flags, type, metric/MTU and empty address lists.
        device = words[0][0:-1]
        current_if = {'device': device, 'ipv4': [], 'ipv6': [], 'type': 'unknown'}
        current_if['flags'] = self.get_options(words[1])
        if 'LOOPBACK' in current_if['flags']:
            current_if['type'] = 'loopback'
        current_if['macaddress'] = 'unknown'    # will be overwritten later
        if len(words) >= 5:  # Newer FreeBSD versions
            current_if['metric'] = words[3]
            current_if['mtu'] = words[5]
        else:
            current_if['mtu'] = words[3]
        return current_if

    def parse_options_line(self, words, current_if, ips):
        # Mac has options like this...
        current_if['options'] = self.get_options(words[0])

    def parse_nd6_line(self, words, current_if, ips):
        # FreeBSD has options like this...
        current_if['options'] = self.get_options(words[1])

    def parse_ether_line(self, words, current_if, ips):
        current_if['macaddress'] = words[1]
        current_if['type'] = 'ether'

    def parse_media_line(self, words, current_if, ips):
        # not sure if this is useful - we also drop information
        current_if['media'] = words[1]
        if len(words) > 2:
            current_if['media_select'] = words[2]
        if len(words) > 3:
            current_if['media_type'] = words[3][1:]
        if len(words) > 4:
            current_if['media_options'] = self.get_options(words[4])

    def parse_status_line(self, words, current_if, ips):
        current_if['status'] = words[1]

    def parse_lladdr_line(self, words, current_if, ips):
        current_if['lladdr'] = words[1]

    def parse_inet_line(self, words, current_if, ips):
        # netbsd show aliases like this
        #  lo0: flags=8049 mtu 33184
        #  inet 127.0.0.1 netmask 0xff000000
        #  inet alias 127.1.1.1 netmask 0xff000000
        if words[1] == 'alias':
            del words[1]

        address = {'address': words[1]}
        # cidr style ip address (eg, 127.0.0.1/24) in inet line
        # used in netbsd ifconfig -e output after 7.1
        if '/' in address['address']:
            ip_address, cidr_mask = address['address'].split('/')

            address['address'] = ip_address

            # Convert the CIDR prefix length to a dotted-quad netmask.
            netmask_length = int(cidr_mask)
            netmask_bin = (1 << 32) - (1 << 32 >> int(netmask_length))
            address['netmask'] = socket.inet_ntoa(struct.pack('!L', netmask_bin))

            if len(words) > 5:
                address['broadcast'] = words[3]
        else:
            # deal with hex netmask
            if re.match('([0-9a-f]){8}', words[3]) and len(words[3]) == 8:
                words[3] = '0x' + words[3]
            if words[3].startswith('0x'):
                address['netmask'] = socket.inet_ntoa(struct.pack('!L', int(words[3], base=16)))
            else:
                # otherwise assume this is a dotted quad
                address['netmask'] = words[3]
        # calculate the network
        address_bin = struct.unpack('!L', socket.inet_aton(address['address']))[0]
        netmask_bin = struct.unpack('!L', socket.inet_aton(address['netmask']))[0]
        address['network'] = socket.inet_ntoa(struct.pack('!L', address_bin & netmask_bin))
        if 'broadcast' not in address:
            # broadcast may be given or we need to calculate
            if len(words) > 5:
                address['broadcast'] = words[5]
            else:
                address['broadcast'] = socket.inet_ntoa(struct.pack('!L', address_bin | (~netmask_bin & 0xffffffff)))

        # add to our list of addresses
        if not words[1].startswith('127.'):
            ips['all_ipv4_addresses'].append(address['address'])
        current_if['ipv4'].append(address)

    def parse_inet6_line(self, words, current_if, ips):
        address = {'address': words[1]}

        # using cidr style addresses, ala NetBSD ifconfig post 7.1
        if '/' in address['address']:
            ip_address, cidr_mask = address['address'].split('/')

            address['address'] = ip_address
            address['prefix'] = cidr_mask

            if len(words) > 5:
                address['scope'] = words[5]
        else:
            if (len(words) >= 4) and (words[2] == 'prefixlen'):
                address['prefix'] = words[3]
            if (len(words) >= 6) and (words[4] == 'scopeid'):
                address['scope'] = words[5]

        # Skip loopback spellings when collecting the global address list.
        localhost6 = ['::1', '::1/128', 'fe80::1%lo0']
        if address['address'] not in localhost6:
            ips['all_ipv6_addresses'].append(address['address'])
        current_if['ipv6'].append(address)

    def parse_tunnel_line(self, words,
                          current_if, ips):
        current_if['type'] = 'tunnel'

    def parse_unknown_line(self, words, current_if, ips):
        # we are going to ignore unknown lines here - this may be
        # a bad idea - but you can override it in your subclass
        pass

    # TODO: these are module scope static function candidates
    # (most of the class is really...)
    def get_options(self, option_string):
        # Extract the comma-separated option list from a '<...>' field;
        # returns [] when no well-formed bracketed list is present.
        start = option_string.find('<') + 1
        end = option_string.rfind('>')
        if (start > 0) and (end > 0) and (end > start + 1):
            option_csv = option_string[start:end]
            return option_csv.split(',')
        else:
            return []

    def merge_default_interface(self, defaults, interfaces, ip_type):
        # Copy the default interface's facts (except the address lists) plus
        # its first ipv4/ipv6 address entry into the defaults dict.
        if 'interface' not in defaults:
            return
        if not defaults['interface'] in interfaces:
            return
        ifinfo = interfaces[defaults['interface']]
        # copy all the interface values across except addresses
        for item in ifinfo:
            if item != 'ipv4' and item != 'ipv6':
                defaults[item] = ifinfo[item]
        if len(ifinfo[ip_type]) > 0:
            for item in ifinfo[ip_type][0]:
                defaults[item] = ifinfo[ip_type][0][item]


# -*- test-case-name: twisted.test.test_stdio.StandardInputOutputTestCase.test_loseConnection -*-
# Copyright (c) 2006-2007 Twisted Matrix Laboratories.
# See LICENSE for details.

"""
Main program for the child process run by
L{twisted.test.test_stdio.StandardInputOutputTestCase.test_loseConnection}
to test that ITransport.loseConnection() works for process transports.
"""

import sys

from twisted.internet.error import ConnectionDone
from twisted.internet import stdio, protocol
from twisted.python import reflect, log


class LoseConnChild(protocol.Protocol):
    exitCode = 0

    def connectionMade(self):
        self.transport.loseConnection()

    def connectionLost(self, reason):
        """
        Check that C{reason} is a L{Failure} wrapping a L{ConnectionDone}
        instance and stop the reactor.  If C{reason} is wrong for some
        reason, log something about that in C{self.errorLogFile} and make
        sure the process exits with a non-zero status.
""" try: try: reason.trap(ConnectionDone) except: log.err(None, "Problem with reason passed to connectionLost") self.exitCode = 1 finally: reactor.stop() if __name__ == '__main__': reflect.namedAny(sys.argv[1]).install() log.startLogging(file(sys.argv[2], 'w')) from twisted.internet import reactor protocol = LoseConnChild() stdio.StandardIO(protocol) reactor.run() sys.exit(protocol.exitCode) # coding: utf-8 from __future__ import unicode_literals from .common import InfoExtractor from ..compat import compat_str from ..utils import ( ExtractorError, int_or_none, try_get, unified_timestamp, ) class FreshLiveIE(InfoExtractor): _VALID_URL = r'https?://freshlive\.tv/[^/]+/(?P\d+)' _TEST = { 'url': 'https://freshlive.tv/satotv/74712', 'md5': '9f0cf5516979c4454ce982df3d97f352', 'info_dict': { 'id': '74712', 'ext': 'mp4', 'title': 'テスト', 'description': 'テスト', 'thumbnail': r're:^https?://.*\.jpg$', 'duration': 1511, 'timestamp': 1483619655, 'upload_date': '20170105', 'uploader': 'サトTV', 'uploader_id': 'satotv', 'view_count': int, 'comment_count': int, 'is_live': False, } } def _real_extract(self, url): video_id = self._match_id(url) webpage = self._download_webpage(url, video_id) options = self._parse_json( self._search_regex( r'window\.__CONTEXT__\s*=\s*({.+?});\s*', webpage, 'initial context'), video_id) info = options['context']['dispatcher']['stores']['ProgramStore']['programs'][video_id] title = info['title'] if info.get('status') == 'upcoming': raise ExtractorError('Stream %s is upcoming' % video_id, expected=True) stream_url = info.get('liveStreamUrl') or info['archiveStreamUrl'] is_live = info.get('liveStreamUrl') is not None formats = self._extract_m3u8_formats( stream_url, video_id, 'mp4', 'm3u8_native', m3u8_id='hls') if is_live: title = self._live_title(title) return { 'id': video_id, 'formats': formats, 'title': title, 'description': info.get('description'), 'thumbnail': info.get('thumbnailUrl'), 'duration': int_or_none(info.get('airTime')), 'timestamp': 
                unified_timestamp(info.get('createdAt')),
            'uploader': try_get(
                info, lambda x: x['channel']['title'], compat_str),
            'uploader_id': try_get(
                info, lambda x: x['channel']['code'], compat_str),
            'uploader_url': try_get(
                info, lambda x: x['channel']['permalink'], compat_str),
            'view_count': int_or_none(info.get('viewCount')),
            'comment_count': int_or_none(info.get('commentCount')),
            'tags': info.get('tags', []),
            'is_live': is_live,
        }


#!/usr/bin/python
#
# Copyright (C) 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

__author__ = 'api.jscudder (Jeff Scudder)'

# NOTE(review): Python 2 only module (urlparse/urllib quoting API).
import urlparse
import urllib


DEFAULT_PROTOCOL = 'http'
DEFAULT_PORT = 80


def parse_url(url_string):
    """Creates a Url object which corresponds to the URL string.

    This method can accept partial URLs, but it will leave missing
    members of the Url unset.
    """
    parts = urlparse.urlparse(url_string)
    url = Url()
    if parts[0]:
        url.protocol = parts[0]
    if parts[1]:
        host_parts = parts[1].split(':')
        if host_parts[0]:
            url.host = host_parts[0]
        if len(host_parts) > 1:
            url.port = host_parts[1]
    if parts[2]:
        url.path = parts[2]
    if parts[4]:
        # Query string: decode each 'key=value' pair; bare keys map to None.
        param_pairs = parts[4].split('&')
        for pair in param_pairs:
            pair_parts = pair.split('=')
            if len(pair_parts) > 1:
                url.params[urllib.unquote_plus(pair_parts[0])] = (
                    urllib.unquote_plus(pair_parts[1]))
            elif len(pair_parts) == 1:
                url.params[urllib.unquote_plus(pair_parts[0])] = None
    return url


class Url(object):
    """Represents a URL and implements comparison logic.
    URL strings which are not identical can still be equivalent, so this
    object provides a better interface for comparing and manipulating URLs
    than strings. URL parameters are represented as a dictionary of strings,
    and defaults are used for the protocol (http) and port (80) if not
    provided.
    """

    def __init__(self, protocol=None, host=None, port=None, path=None,
                 params=None):
        self.protocol = protocol
        self.host = host
        self.port = port
        self.path = path
        self.params = params or {}

    def to_string(self):
        # Rebuild a URL string from the component parts (inverse of parse_url).
        url_parts = ['', '', '', '', '', '']
        if self.protocol:
            url_parts[0] = self.protocol
        if self.host:
            if self.port:
                url_parts[1] = ':'.join((self.host, str(self.port)))
            else:
                url_parts[1] = self.host
        if self.path:
            url_parts[2] = self.path
        if self.params:
            url_parts[4] = self.get_param_string()
        return urlparse.urlunparse(url_parts)

    def get_param_string(self):
        # Encode the params dict as an escaped 'k=v&k=v' query string.
        param_pairs = []
        for key, value in self.params.iteritems():
            param_pairs.append('='.join((urllib.quote_plus(key),
                                         urllib.quote_plus(str(value)))))
        return '&'.join(param_pairs)

    def get_request_uri(self):
        """Returns the path with the parameters escaped and appended."""
        param_string = self.get_param_string()
        if param_string:
            return '?'.join([self.path, param_string])
        else:
            return self.path

    def __cmp__(self, other):
        # Python 2 rich comparison: equivalent URLs compare equal even when
        # one side omits the default protocol/port.
        if not isinstance(other, Url):
            return cmp(self.to_string(), str(other))
        difference = 0
        # Compare the protocol
        if self.protocol and other.protocol:
            difference = cmp(self.protocol, other.protocol)
        elif self.protocol and not other.protocol:
            difference = cmp(self.protocol, DEFAULT_PROTOCOL)
        elif not self.protocol and other.protocol:
            difference = cmp(DEFAULT_PROTOCOL, other.protocol)
        if difference != 0:
            return difference
        # Compare the host
        difference = cmp(self.host, other.host)
        if difference != 0:
            return difference
        # Compare the port
        if self.port and other.port:
            difference = cmp(self.port, other.port)
        elif self.port and not other.port:
            difference = cmp(self.port, DEFAULT_PORT)
        elif not self.port and other.port:
            difference = cmp(DEFAULT_PORT, other.port)
        if difference != 0:
            return difference
        # Compare the path
        difference = cmp(self.path, other.path)
        if difference != 0:
            return difference
        # Compare the parameters
        return cmp(self.params, other.params)

    def __str__(self):
        return self.to_string()


#!/usr/bin/env python
"""
Test support for 'with' statements in Python >= 2.5
"""
# Copyright (C) 2009 Barry Pederson
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301

from __future__ import with_statement

import unittest

import settings

from amqplib.client_0_8 import Connection, Message


class TestChannel(unittest.TestCase):

    def test_with(self):
        # Both Connection and Channel are context managers: leaving the
        # 'with' body must close them.
        with Connection(**settings.connect_args) as conn:
            self.assertEqual(conn.transport is None, False)
            with conn.channel(1) as ch:
                self.assertEqual(1 in conn.channels, True)
                #
                # Do something with the channel
                #
                ch.access_request('/data', active=True, write=True)
                ch.exchange_declare('unittest.fanout', 'fanout', auto_delete=True)

                msg = Message('unittest message',
                    content_type='text/plain',
                    application_headers={'foo': 7, 'bar': 'baz'})

                ch.basic_publish(msg, 'unittest.fanout')
            #
            # check that the channel was closed
            #
            self.assertEqual(1 in conn.channels, False)
            self.assertEqual(ch.is_open, False)
        #
        # Check that the connection was closed
        #
        self.assertEqual(conn.transport, None)


def main():
    suite = unittest.TestLoader().loadTestsFromTestCase(TestChannel)
    unittest.TextTestRunner(**settings.test_args).run(suite)


if __name__ == '__main__':
    main()


# -*- coding: utf-8 -*-
"""
Created on 09.06.16

Created for pyclamster-gui

Copyright (C) {2016}

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program.  If not, see .
"""
# System modules
import copy

# External modules
import numpy as np

# Internal modules


__version__ = "0.1"


class region(object):
    # image_margins = [left,right,top,bottom]
    def __init__(self, center=None, image_margins=None, image_data=None, region_data=None):
        self.center = np.array([0, 0])
        self.data = None  # [[x, y, r, g, b], ...]
(x,y based on region center) if isinstance(center, np.ndarray): if center.size == 2: self.center = center elif isinstance(image_margins, np.ndarray): if image_margins.size == 4: self.center = self._calc_image_center(image_margins) if isinstance(region_data,np.ndarray): if region_data.shape[1] == 5 and len(region_data.shape) == 2: self.data = region_data elif isinstance(image_data,np.ndarray): if image_data.shape[1] == 5 and len(image_data.shape) == 2: self.data = image_data self.data[:,0] = self.data[:,0] - self.center[0] self.data[:,1] = self.data[:,1] - self.center[1] def addData(self, image, x, y, center = None): if not isinstance(center,np.ndarray): center = self.center img_x = center[0] + x img_y = center[1] + y if not isinstance(self.data,np.ndarray): self.data = np.array([x,y,image[img_x,img_y,0],image[img_x,img_y,1],image[img_x,img_y,2]]).T else: # remove already added entries not_existing_entries = np.array([not [z[0],z[1]] in self.data[:,0:2].tolist() for z in zip(x,y)]) x = x[not_existing_entries] y = y[not_existing_entries] # get new data that should be added new_data = np.array([x,y,image[img_x,img_y,0],image[img_x,img_y,1],image[img_x,img_y,2]]).T # append existing data self.data = np.append(self.data,new_data,0) def removeData(self, x, y, center_offset = None): if not isinstance(center_offset,np.ndarray): center_offset = np.array([0,0]) x = x + center_offset[0] y = y + center_offset[1] # find existing entries z = np.array([x,y]).T if len(z.shape) == 1: z = np.array([z]) existing_entries = np.array([z[i].tolist() in self.data[:,0:2].tolist() for i in range(len(z))]) z = z[existing_entries] # define which entries to keep keep_entries = [not di in z.tolist() for di in self.data[:,0:2].tolist()] if any(keep_entries): self.data = self.data[np.array(keep_entries)] else: self.data = None def addRegion(self, region, center_offset = None): new_data = copy.copy(region.data) if not isinstance(center_offset,np.ndarray): center_offset = region.center - 
self.center new_data[:,0] = new_data[:,0] + center_offset[0] new_data[:,1] = new_data[:,1] + center_offset[1] # find not existing entries not_existing_entries = np.array([not zi in self.data[:,0:2].tolist() for zi in new_data[:,0:2].tolist()]) self.data = np.append(self.data,new_data[not_existing_entries],0) def removeRegion(self, region, center_offset = None): rm_x = region.data[:,0] rm_y = region.data[:,1] if not isinstance(center_offset, np.ndarray): center_offset = region.center - self.center self.removeData(rm_x, rm_y, center_offset) def cropImageRegion(self, image, center = None): #TODO if not isinstance(center,np.ndarray): center = self.center pass def exportToMask(self, image_margins, center = None): #TODO if not isinstance(center,np.ndarray): center = self.center pass def _calc_image_center(self, image_margins): x = (image_margins[1]-image_margins[0])*.5 y = (image_margins[3]-image_margins[2])*.5 return np.array([x,y],dtype=int) if __name__ == '__main__': import numpy as np r = region(region_data=np.array([[1,1,0,99,99]]),center=np.array([960,960])) r2 = region(region_data=np.array([[1,1,99,0,99]]),center=np.array([0,0])) r3 = region(image_data =np.array([[1,1,99,99,0]]),image_margins = np.array([0,1920,0,1920])) r.addRegion(r2) r.addRegion(r2,center_offset = np.array([10,100])) #!/usr/bin/env python ############################################################################# ## ## Copyright (C) 2010 Riverbank Computing Limited. ## Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies). ## All rights reserved. ## ## This file is part of the examples of PyQt. ## ## $QT_BEGIN_LICENSE:BSD$ ## You may use this file under the terms of the BSD license as follows: ## ## "Redistribution and use in source and binary forms, with or without ## modification, are permitted provided that the following conditions are ## met: ## * Redistributions of source code must retain the above copyright ## notice, this list of conditions and the following disclaimer. 
## * Redistributions in binary form must reproduce the above copyright ## notice, this list of conditions and the following disclaimer in ## the documentation and/or other materials provided with the ## distribution. ## * Neither the name of Nokia Corporation and its Subsidiary(-ies) nor ## the names of its contributors may be used to endorse or promote ## products derived from this software without specific prior written ## permission. ## ## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT ## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT ## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, ## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT ## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, ## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY ## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT ## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE ## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." 
## $QT_END_LICENSE$ ## ############################################################################# from PyQt4 import QtCore, QtGui, QtNetwork class FortuneThread(QtCore.QThread): newFortune = QtCore.pyqtSignal(str) error = QtCore.pyqtSignal(int, str) def __init__(self, parent=None): super(FortuneThread, self).__init__(parent) self.quit = False self.hostName = '' self.cond = QtCore.QWaitCondition() self.mutex = QtCore.QMutex() self.port = 0 def __del__(self): self.mutex.lock() self.quit = True self.cond.wakeOne() self.mutex.unlock() self.wait() def requestNewFortune(self, hostname, port): locker = QtCore.QMutexLocker(self.mutex) self.hostName = hostname self.port = port if not self.isRunning(): self.start() else: self.cond.wakeOne() def run(self): self.mutex.lock() serverName = self.hostName serverPort = self.port self.mutex.unlock() while not self.quit: Timeout = 5 * 1000 socket = QtNetwork.QTcpSocket() socket.connectToHost(serverName, serverPort) if not socket.waitForConnected(Timeout): self.error.emit(socket.error(), socket.errorString()) return while socket.bytesAvailable() < 2: if not socket.waitForReadyRead(Timeout): self.error.emit(socket.error(), socket.errorString()) return instr = QtCore.QDataStream(socket) instr.setVersion(QtCore.QDataStream.Qt_4_0) blockSize = instr.readUInt16() while socket.bytesAvailable() < blockSize: if not socket.waitForReadyRead(Timeout): self.error.emit(socket.error(), socket.errorString()) return self.mutex.lock() fortune = instr.readString() try: # Python v3. fortune = str(fortune, encoding='ascii') except TypeError: # Python v2. 
                pass

            self.newFortune.emit(fortune)

            # Sleep until the next request arrives, then pick up the
            # (possibly changed) host/port under the lock.
            self.cond.wait(self.mutex)
            serverName = self.hostName
            serverPort = self.port
            self.mutex.unlock()


class BlockingClient(QtGui.QDialog):
    def __init__(self, parent=None):
        super(BlockingClient, self).__init__(parent)

        self.thread = FortuneThread()
        self.currentFortune = ''

        hostLabel = QtGui.QLabel("&Server name:")
        portLabel = QtGui.QLabel("S&erver port:")

        self.hostLineEdit = QtGui.QLineEdit('Localhost')
        self.portLineEdit = QtGui.QLineEdit()
        self.portLineEdit.setValidator(QtGui.QIntValidator(1, 65535, self))

        hostLabel.setBuddy(self.hostLineEdit)
        portLabel.setBuddy(self.portLineEdit)

        self.statusLabel = QtGui.QLabel("This example requires that you run "
                "the Fortune Server example as well.")

        self.getFortuneButton = QtGui.QPushButton("Get Fortune")
        self.getFortuneButton.setDefault(True)
        self.getFortuneButton.setEnabled(False)

        quitButton = QtGui.QPushButton("Quit")

        buttonBox = QtGui.QDialogButtonBox()
        buttonBox.addButton(self.getFortuneButton,
                QtGui.QDialogButtonBox.ActionRole)
        buttonBox.addButton(quitButton, QtGui.QDialogButtonBox.RejectRole)

        self.hostLineEdit.textChanged.connect(self.enableGetFortuneButton)
        self.portLineEdit.textChanged.connect(self.enableGetFortuneButton)
        self.getFortuneButton.clicked.connect(self.requestNewFortune)
        quitButton.clicked.connect(self.close)
        self.thread.newFortune.connect(self.showFortune)
        self.thread.error.connect(self.displayError)

        mainLayout = QtGui.QGridLayout()
        mainLayout.addWidget(hostLabel, 0, 0)
        mainLayout.addWidget(self.hostLineEdit, 0, 1)
        mainLayout.addWidget(portLabel, 1, 0)
        mainLayout.addWidget(self.portLineEdit, 1, 1)
        mainLayout.addWidget(self.statusLabel, 2, 0, 1, 2)
        mainLayout.addWidget(buttonBox, 3, 0, 1, 2)
        self.setLayout(mainLayout)

        self.setWindowTitle("Blocking Fortune Client")
        self.portLineEdit.setFocus()

    def requestNewFortune(self):
        self.getFortuneButton.setEnabled(False)
        self.thread.requestNewFortune(self.hostLineEdit.text(),
                int(self.portLineEdit.text()))

    def showFortune(self,
                    nextFortune):
        # If the server repeated itself, immediately ask for a fresh fortune.
        if nextFortune == self.currentFortune:
            self.requestNewFortune()
            return

        self.currentFortune = nextFortune
        self.statusLabel.setText(self.currentFortune)
        self.getFortuneButton.setEnabled(True)

    def displayError(self, socketError, message):
        if socketError == QtNetwork.QAbstractSocket.HostNotFoundError:
            QtGui.QMessageBox.information(self, "Blocking Fortune Client",
                    "The host was not found. Please check the host and port "
                    "settings.")
        elif socketError == QtNetwork.QAbstractSocket.ConnectionRefusedError:
            QtGui.QMessageBox.information(self, "Blocking Fortune Client",
                    "The connection was refused by the peer. Make sure the "
                    "fortune server is running, and check that the host name "
                    "and port settings are correct.")
        else:
            QtGui.QMessageBox.information(self, "Blocking Fortune Client",
                    "The following error occurred: %s." % message)

        self.getFortuneButton.setEnabled(True)

    def enableGetFortuneButton(self):
        self.getFortuneButton.setEnabled(bool(self.hostLineEdit.text() and
                self.portLineEdit.text()))


if __name__ == '__main__':
    import sys

    app = QtGui.QApplication(sys.argv)
    client = BlockingClient()
    client.show()
    sys.exit(client.exec_())


# -------------------------------------------------------------------------------------------------------------
## Import Statements
# -------------------------------------------------------------------------------------------------------------
from __future__ import print_function

import sys

from .utils import get_builds, get_ggd_channels, get_species

SPECIES_LIST = sorted(get_species(update_files=False))
GENOME_BUILDS = sorted(get_builds("*"))
CHANNEL_LIST = [x.encode("ascii") for x in get_ggd_channels()]


# -------------------------------------------------------------------------------------------------------------
## Argument Parser
# -------------------------------------------------------------------------------------------------------------
def add_search(p):
    # Register the 'search' subcommand and its CLI options on parser `p`.
    c = p.add_parser(
        "search",
        help="Search for a ggd data package",
description="Search for available ggd data packages. Results are filtered by match score from high to low. (Only 5 results will be reported unless the -dn flag is changed)", ) c.add_argument( "search_term", nargs="+", help="**Required** The term(s) to search for. Multiple terms can be used. Example: 'ggd search reference genome'", ) c.add_argument( "--search-type", default="both", choices=["both", "combined-only", "non-combined-only"], help=( "(Optional) How to search for data packages with the search terms provided. Options = 'combined-only', 'non-combined-only', and 'both'." " 'combined-only' will use the provided search terms as a single search term. 'non-combined-only' will use the provided search term to search for" " data package that match each search term separately. 'both' will use the search terms combined and each search term separately to search" " for data packages. Default = 'both'" ), ) c.add_argument( "-g", "--genome-build", default=[], action="append", choices=[str(x) for x in GENOME_BUILDS], help="(Optional) Filter results by the genome build of the desired recipe", ) c.add_argument( "-s", "--species", default=[], action="append", help="(Optional) Filter results by the species for the desired recipe", choices=[str(x) for x in SPECIES_LIST], ) c.add_argument( "-dn", "--display-number", default=5, help="(Optional) The number of search results to display. (Default = 5)", ) c.add_argument( "-m", "--match-score", default="90", help="(Optional) A score between 0 and 100 to use to filter the results by. (Default = 90). The lower the number the more results will be output", ) c.add_argument( "-c", "--channel", help="(Optional) The ggd channel to search. 
(Default = genomics)", choices=[x.decode("ascii") for x in CHANNEL_LIST], default="genomics", ) c.set_defaults(func=search) # ------------------------------------------------------------------------------------------------------------- ## Functions/Methods # ------------------------------------------------------------------------------------------------------------- def load_json(jfile): """Method to load a json file into a dictionary load_json ========= Method to load a json file Parameters: --------- 1) jfile: (str) The path to the json file Returns: 1) (dict) A dictionary of a json object """ import json with open(jfile) as jsonFile: return json.load(jsonFile) def load_json_from_url(json_url): """Method to load a json file from a url load_json_from_url ================== Method to load a json file from a url. Uses the requests module to get the json file from the url. Parameters: --------- 1) json_url: (str) The url to the json path Returns: ++++++++ 1) (dict) A dictionary of a json object """ import json import traceback import requests try: return requests.get(json_url).json() except ValueError as e: sys.stderr.write("\n:ggd:search: !!ERROR!! in loading json file from url") sys.stderr.write("\n\t Invalid URL: %s" % json_url) sys.stderr.write(str(e)) sys.stderr.write(traceback.format_exc()) sys.exit(1) def search_packages(json_dict, search_terms, search_type="both", score_cutoff=50): """Method to search for ggd packages in the ggd channeldata.json metadata file based on user provided search terms search_packages =============== Method to search for ggd packages/recipes containing specific search terms NOTE: Both the package name and the package keywords are searched Parameters: --------- 1) json_dict: (dict) A json file loaded into a dictionary. 
                     (The file to search) the load_json_from_url() method creates the dictionary
    2) search_terms: (list) A list of terms representing package names or keywords to search for
    3) search_type: (str) A string matching either 'both', 'combined-only', or 'non-combined-only', representing how to use the search terms.
    4) score_cutoff: (int) A number between 0 and 100 that represent which matches to return (Default = 50)

    Returns:
    ++++++++
    1) (dict) A list of pkg names who's either name or keyword match score reached the score cutoff
    """
    import re
    from collections import defaultdict

    from fuzzywuzzy import fuzz, process

    # pkg name -> {"pkg_score": float, "keyword_score": float} (max over terms)
    pkg_score = defaultdict(lambda: defaultdict(float))

    ## Get final search terms based on search type
    final_search_terms = []
    if search_type == "both":
        final_search_terms.append(" ".join(search_terms))
        final_search_terms.extend(search_terms)

    if search_type == "combined-only":
        final_search_terms.append(" ".join(search_terms))

    if search_type == "non-combined-only":
        final_search_terms = search_terms

    ## Search for data packages
    for term in final_search_terms:
        for pkg in json_dict["packages"].keys():

            ## Get match score between name and term
            score = fuzz.partial_ratio(term.lower(), pkg.lower())

            ## Get the max score from all keyword scores found
            # Keywords are scored both whole and split on '-'/'_' fragments.
            keyword_max_score = max(
                [
                    fuzz.ratio(term.lower(), x.lower())
                    for x in [
                        subkeyword
                        for keyword in json_dict["packages"][pkg]["keywords"]
                        for subkeyword in re.split("-|_", keyword.strip())
                    ]
                    + json_dict["packages"][pkg]["keywords"]
                ]
            )

            ## Skip any package that does not meet the match score
            if score < score_cutoff and keyword_max_score < score_cutoff:
                continue

            ## Set max score in dict
            if float(pkg_score[pkg]["pkg_score"]) < float(score):
                pkg_score[pkg]["pkg_score"] = float(score)

            if float(pkg_score[pkg]["keyword_score"]) < float(keyword_max_score):
                pkg_score[pkg]["keyword_score"] = float(keyword_max_score)

    ## Get a final list of pkg names
    # Sorted by name-match score, best first.
    temp_pkg_list = sorted(
        [
            [pkg, float(max_scores["pkg_score"])]
            for pkg, max_scores in
            pkg_score.items()
            if float(max_scores["pkg_score"]) >= float(score_cutoff)
            or float(max_scores["keyword_score"]) >= float(score_cutoff)
        ],
        key=lambda x: x[1],
        reverse=True,
    )

    final_list = [pkg_list[0] for pkg_list in temp_pkg_list]

    return final_list


def check_installed(ggd_recipe, ggd_jdict):
    """Method to check if the recipe has already been installed and is in the conda ggd storage path.

    check_if_installed
    ==================
    This method is used to check if the ggd package has been installed and is located in the ggd storage path.
    """
    import glob
    import os

    from .utils import conda_root

    species = ggd_jdict["packages"][ggd_recipe]["identifiers"]["species"]
    build = ggd_jdict["packages"][ggd_recipe]["identifiers"]["genome-build"]
    version = ggd_jdict["packages"][ggd_recipe]["version"]

    CONDA_ROOT = conda_root()
    # Installed packages live at <conda>/share/ggd/<species>/<build>/<pkg>/<version>
    path = os.path.join(CONDA_ROOT, "share", "ggd", species, build, ggd_recipe, version)
    recipe_exists = glob.glob(path)
    if recipe_exists:
        return (True, path)
    else:
        return (False, None)


def filter_by_identifiers(iden_keys, json_dict, filter_terms):
    """Method to filter a dictionary by an identifier field for the certain package.

    filter_by_identifiers
    =====================
    A method used to filter the list of data packages by information in the identifiers field in the channeldata.json file

    Parameters:
    ----------
    1) iden_keys: (list) A list of he identifiers keys. Example = ["species","genome-build"]
    2) json_dict: (dict) The json dictionary created from load_json()
    3) filter_terms: (list) A list of the term(s) to filter by.
Example: ["Homo_sapiens","hg19"] NOTE: List order of iden_keys should match list order of filter_terms Returns: ++++++++ 1) (dict) Updated/filtered json_dict """ import copy keys = json_dict["packages"].keys() key_count = len(keys) keys_to_keep = set() if len(iden_keys) > 0 and len(iden_keys) == len(filter_terms): for key in keys: for i, iden_key in enumerate(iden_keys): if iden_key in json_dict["packages"][key]["identifiers"]: if len(filter_terms[i]) == 0: continue if ( filter_terms[i] in json_dict["packages"][key]["identifiers"][iden_key] ): keys_to_keep.add(key) new_json_dict = copy.deepcopy(json_dict) ## Remove packages if len(keys_to_keep) > 0: for key in keys: if key not in keys_to_keep: del new_json_dict["packages"][key] if len(new_json_dict["packages"].keys()) == key_count: ## If unable to return a filtered set return the original match list print( "\n:ggd:search: WARNING: Unable to filter packages using: '%s'" % ", ".join(filter_terms) ) print("\tThe un-filtered list will be used\n") return new_json_dict def print_summary(search_terms, json_dict, match_list, installed_pkgs, installed_paths): """ Method to print the summary/results of the search print_summary ============ Method used to print out the final set of searched packages Parameters: --------- 1) search_terms: (list) The search terms from the user 2) json_dict: (dict) The json dictionary from the load_json() method 3) match_list: (list) The filtered and final set of searched recipes 4) installed_pkgs: (set) A set of pkg names that are installed 5) installed_paths: (dict) A dictionary with keys = pkg names, values = installed paths Returns: +++++++ 1) True if print summary printed out successfully """ dash = " " + "-" * 100 if len(match_list) < 1: print( "\n:ggd:search: No results for %s. 
Update your search term(s) and try again" % ", ".join(search_terms) ) sys.exit() print("\n", dash) for pkg in match_list: results = [] if pkg in json_dict["packages"]: # results.append("\n\t{} {}\n".format(("\033[1m" + "GGD Package:" + "\033[0m"), pkg)) results.append( "\n\t{}\n\t{}".format(("\033[1m" + pkg + "\033[0m"), "=" * len(pkg)) ) if ( "summary" in json_dict["packages"][pkg] and json_dict["packages"][pkg]["summary"] ): results.append( "\t{} {}".format( ("\033[1m" + "Summary:" + "\033[0m"), json_dict["packages"][pkg]["summary"], ) ) if ( "identifiers" in json_dict["packages"][pkg] and json_dict["packages"][pkg]["identifiers"] ): results.append( "\t{} {}".format( ("\033[1m" + "Species:" + "\033[0m"), json_dict["packages"][pkg]["identifiers"]["species"], ) ) results.append( "\t{} {}".format( ("\033[1m" + "Genome Build:" + "\033[0m"), json_dict["packages"][pkg]["identifiers"]["genome-build"], ) ) if ( "keywords" in json_dict["packages"][pkg] and json_dict["packages"][pkg]["keywords"] ): results.append( "\t{} {}".format( ("\033[1m" + "Keywords:" + "\033[0m"), ", ".join(json_dict["packages"][pkg]["keywords"]), ) ) if ( "tags" in json_dict["packages"][pkg] and json_dict["packages"][pkg]["tags"] ): if "cache" in json_dict["packages"][pkg]["tags"]: results.append( "\t{} {}".format( ("\033[1m" + "Cached:" + "\033[0m"), json_dict["packages"][pkg]["tags"]["cached"], ) ) if "data-provider" in json_dict["packages"][pkg]["tags"]: results.append( "\t{} {}".format( ("\033[1m" + "Data Provider:" + "\033[0m"), json_dict["packages"][pkg]["tags"]["data-provider"], ) ) if "data-version" in json_dict["packages"][pkg]["tags"]: results.append( "\t{} {}".format( ("\033[1m" + "Data Version:" + "\033[0m"), json_dict["packages"][pkg]["tags"]["data-version"], ) ) if "file-type" in json_dict["packages"][pkg]["tags"]: results.append( "\t{} {}".format( ("\033[1m" + "File type(s):" + "\033[0m"), ", ".join(json_dict["packages"][pkg]["tags"]["file-type"]), ) ) if "genomic-coordinate-base" in 
json_dict["packages"][pkg]["tags"]: results.append( "\t{} {}".format( ("\033[1m" + "Data file coordinate base:" + "\033[0m"), json_dict["packages"][pkg]["tags"][ "genomic-coordinate-base" ], ) ) if "final-files" in json_dict["packages"][pkg]["tags"]: results.append( "\t{} {}".format( ("\033[1m" + "Included Data Files:" + "\033[0m"), "\n\t\t" + "\n\t\t".join( json_dict["packages"][pkg]["tags"]["final-files"] ), ) ) else: results.append( "\t{} {}".format( ("\033[1m" + "Prefix Install WARNING:" + "\033[0m"), ( "This package has not been set up to use the --prefix flag when running ggd install." " Once installed, this package will work with other ggd tools that use the --prefix flag." ), ) ) if "final-file-sizes" in json_dict["packages"][pkg]["tags"]: results.append( "\t{} {}".format( ("\033[1m" + "Approximate Data File Sizes:" + "\033[0m"), "\n\t\t" + "\n\t\t".join( [ "{}: {}".format( x, json_dict["packages"][pkg]["tags"][ "final-file-sizes" ][x], ) for x in json_dict["packages"][pkg]["tags"][ "final-file-sizes" ] ] ), ) ) if pkg in installed_pkgs: ## IF installed results.append( "\n\tThis package is already installed on your system.\n\t You can find the installed data files here: %s" % installed_paths[pkg] ) else: from .utils import check_for_meta_recipes results.append( "\n\tTo install run:\n\t\tggd install %s %s" % ( pkg, "--id " if check_for_meta_recipes(pkg, json_dict) else "", ) ) print("\n\n".join(results)) print("\n", dash) print("\n\033[1m>>> Scroll up to see package details and install info <<<\033[0m") longest_pkg_name = max(map(len, match_list)) + 2 print("\n\n" + ("*" * longest_pkg_name)) print("\033[1mPackage Name Results\033[0m") print("====================\n") print("\n".join(match_list)) print("\nNOTE: Name order matches order of packages in detailed section above") print("*" * longest_pkg_name + "\n") return True def search(parser, args): """Main method for ggd search. 
search ===== Main method for running a recipe/package search Parameters: ---------- 1) parser 2) args """ from .utils import get_builds, get_channeldata_url ## load the channeldata.json file j_dict = load_json_from_url(get_channeldata_url(args.channel)) ## Remove the ggd key if it exists ggd_key = j_dict["packages"].pop("ggd", None) ## identify if search_terms have any species or genome build in them species_lower = {x.lower(): x for x in SPECIES_LIST} gb_lower = {x.lower(): x for x in GENOME_BUILDS} filtered_search_terms = [] for term in args.search_term: if term.lower() in species_lower.keys(): if species_lower[term.lower()] not in args.species: args.species.append(species_lower[term.lower()]) elif term.lower() in gb_lower.keys(): if gb_lower[term.lower()] not in args.genome_build: args.genome_build.append(gb_lower[term.lower()]) else: ## Only use search terms that are not used to filter the results by identifiers filtered_search_terms.append(term) ## genome_build takes precedence over species (If genome build provided, species is implied) final_species_list = args.species for species in args.species: build = get_builds(species) if [x for x in build if x in args.genome_build]: final_species_list.remove(species) args.species = final_species_list ## Filter the json dict by species or genome build if applicable if args.genome_build or args.species: j_dict = filter_by_identifiers( ["species"] * len(args.species) + ["genome-build"] * len(args.genome_build), j_dict, args.species + args.genome_build, ) ## Search pkg names and keywords match_results = search_packages( j_dict, filtered_search_terms, args.search_type, int(args.match_score) ) ## Get installed paths installed_dict = {} installed_set = set() for pkg in match_results: isinstalled, path = check_installed(pkg, j_dict) if isinstalled: installed_dict[pkg] = path installed_set.add(pkg) ## Print search results match_result_num = str(len(match_results)) if int(match_result_num) >= int(args.display_number): 
subset_match_results = match_results[0 : int(args.display_number)] else: subset_match_results = match_results ## Print search results to STDOUT printed = print_summary( args.search_term, j_dict, subset_match_results, installed_set, installed_dict ) ## Add a comment if a subset of search results are provided if int(match_result_num) > int(args.display_number): print( "\n\n:ggd:search: NOTE: Only showing results for top {d} of {m} matches.".format( d=str(args.display_number), m=match_result_num ) ) print( ":ggd:search: To display all matches append your search command with '-dn {m}'".format( m=match_result_num ) ) print( "\n\t ggd search {t} -dn {m}\n".format( t=" ".join(args.search_term), m=match_result_num ) ) ## Return result of print_summary return printed # # Autogenerated by Thrift Compiler (0.9.0) # # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING # # options string: py # from thrift.Thrift import TType, TMessageType, TException, TApplicationException import hive_metastore.ThriftHiveMetastore from ttypes import * from thrift.Thrift import TProcessor from thrift.transport import TTransport from thrift.protocol import TBinaryProtocol, TProtocol try: from thrift.protocol import fastbinary except: fastbinary = None class Iface(hive_metastore.ThriftHiveMetastore.Iface): def execute(self, query): """ Parameters: - query """ pass def fetchOne(self, ): pass def fetchN(self, numRows): """ Parameters: - numRows """ pass def fetchAll(self, ): pass def getSchema(self, ): pass def getThriftSchema(self, ): pass def getClusterStatus(self, ): pass def getQueryPlan(self, ): pass def clean(self, ): pass class Client(hive_metastore.ThriftHiveMetastore.Client, Iface): def __init__(self, iprot, oprot=None): hive_metastore.ThriftHiveMetastore.Client.__init__(self, iprot, oprot) def execute(self, query): """ Parameters: - query """ self.send_execute(query) self.recv_execute() def send_execute(self, query): self._oprot.writeMessageBegin('execute', TMessageType.CALL, 
self._seqid) args = execute_args() args.query = query args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_execute(self, ): (fname, mtype, rseqid) = self._iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(self._iprot) self._iprot.readMessageEnd() raise x result = execute_result() result.read(self._iprot) self._iprot.readMessageEnd() if result.ex is not None: raise result.ex return def fetchOne(self, ): self.send_fetchOne() return self.recv_fetchOne() def send_fetchOne(self, ): self._oprot.writeMessageBegin('fetchOne', TMessageType.CALL, self._seqid) args = fetchOne_args() args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_fetchOne(self, ): (fname, mtype, rseqid) = self._iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(self._iprot) self._iprot.readMessageEnd() raise x result = fetchOne_result() result.read(self._iprot) self._iprot.readMessageEnd() if result.success is not None: return result.success if result.ex is not None: raise result.ex raise TApplicationException(TApplicationException.MISSING_RESULT, "fetchOne failed: unknown result"); def fetchN(self, numRows): """ Parameters: - numRows """ self.send_fetchN(numRows) return self.recv_fetchN() def send_fetchN(self, numRows): self._oprot.writeMessageBegin('fetchN', TMessageType.CALL, self._seqid) args = fetchN_args() args.numRows = numRows args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_fetchN(self, ): (fname, mtype, rseqid) = self._iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(self._iprot) self._iprot.readMessageEnd() raise x result = fetchN_result() result.read(self._iprot) self._iprot.readMessageEnd() if result.success is not None: return result.success if result.ex is not None: raise result.ex raise 
TApplicationException(TApplicationException.MISSING_RESULT, "fetchN failed: unknown result"); def fetchAll(self, ): self.send_fetchAll() return self.recv_fetchAll() def send_fetchAll(self, ): self._oprot.writeMessageBegin('fetchAll', TMessageType.CALL, self._seqid) args = fetchAll_args() args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_fetchAll(self, ): (fname, mtype, rseqid) = self._iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(self._iprot) self._iprot.readMessageEnd() raise x result = fetchAll_result() result.read(self._iprot) self._iprot.readMessageEnd() if result.success is not None: return result.success if result.ex is not None: raise result.ex raise TApplicationException(TApplicationException.MISSING_RESULT, "fetchAll failed: unknown result"); def getSchema(self, ): self.send_getSchema() return self.recv_getSchema() def send_getSchema(self, ): self._oprot.writeMessageBegin('getSchema', TMessageType.CALL, self._seqid) args = getSchema_args() args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_getSchema(self, ): (fname, mtype, rseqid) = self._iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(self._iprot) self._iprot.readMessageEnd() raise x result = getSchema_result() result.read(self._iprot) self._iprot.readMessageEnd() if result.success is not None: return result.success if result.ex is not None: raise result.ex raise TApplicationException(TApplicationException.MISSING_RESULT, "getSchema failed: unknown result"); def getThriftSchema(self, ): self.send_getThriftSchema() return self.recv_getThriftSchema() def send_getThriftSchema(self, ): self._oprot.writeMessageBegin('getThriftSchema', TMessageType.CALL, self._seqid) args = getThriftSchema_args() args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_getThriftSchema(self, ): (fname, mtype, rseqid) = 
self._iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(self._iprot) self._iprot.readMessageEnd() raise x result = getThriftSchema_result() result.read(self._iprot) self._iprot.readMessageEnd() if result.success is not None: return result.success if result.ex is not None: raise result.ex raise TApplicationException(TApplicationException.MISSING_RESULT, "getThriftSchema failed: unknown result"); def getClusterStatus(self, ): self.send_getClusterStatus() return self.recv_getClusterStatus() def send_getClusterStatus(self, ): self._oprot.writeMessageBegin('getClusterStatus', TMessageType.CALL, self._seqid) args = getClusterStatus_args() args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_getClusterStatus(self, ): (fname, mtype, rseqid) = self._iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(self._iprot) self._iprot.readMessageEnd() raise x result = getClusterStatus_result() result.read(self._iprot) self._iprot.readMessageEnd() if result.success is not None: return result.success if result.ex is not None: raise result.ex raise TApplicationException(TApplicationException.MISSING_RESULT, "getClusterStatus failed: unknown result"); def getQueryPlan(self, ): self.send_getQueryPlan() return self.recv_getQueryPlan() def send_getQueryPlan(self, ): self._oprot.writeMessageBegin('getQueryPlan', TMessageType.CALL, self._seqid) args = getQueryPlan_args() args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_getQueryPlan(self, ): (fname, mtype, rseqid) = self._iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(self._iprot) self._iprot.readMessageEnd() raise x result = getQueryPlan_result() result.read(self._iprot) self._iprot.readMessageEnd() if result.success is not None: return result.success if result.ex is not None: raise result.ex raise 
TApplicationException(TApplicationException.MISSING_RESULT, "getQueryPlan failed: unknown result"); def clean(self, ): self.send_clean() self.recv_clean() def send_clean(self, ): self._oprot.writeMessageBegin('clean', TMessageType.CALL, self._seqid) args = clean_args() args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_clean(self, ): (fname, mtype, rseqid) = self._iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(self._iprot) self._iprot.readMessageEnd() raise x result = clean_result() result.read(self._iprot) self._iprot.readMessageEnd() return class Processor(hive_metastore.ThriftHiveMetastore.Processor, Iface, TProcessor): def __init__(self, handler): hive_metastore.ThriftHiveMetastore.Processor.__init__(self, handler) self._processMap["execute"] = Processor.process_execute self._processMap["fetchOne"] = Processor.process_fetchOne self._processMap["fetchN"] = Processor.process_fetchN self._processMap["fetchAll"] = Processor.process_fetchAll self._processMap["getSchema"] = Processor.process_getSchema self._processMap["getThriftSchema"] = Processor.process_getThriftSchema self._processMap["getClusterStatus"] = Processor.process_getClusterStatus self._processMap["getQueryPlan"] = Processor.process_getQueryPlan self._processMap["clean"] = Processor.process_clean def process(self, iprot, oprot): (name, type, seqid) = iprot.readMessageBegin() if name not in self._processMap: iprot.skip(TType.STRUCT) iprot.readMessageEnd() x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name)) oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid) x.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() return else: self._processMap[name](self, seqid, iprot, oprot) return True def process_execute(self, seqid, iprot, oprot): args = execute_args() args.read(iprot) iprot.readMessageEnd() result = execute_result() try: self._handler.execute(args.query) except 
HiveServerException as ex: result.ex = ex oprot.writeMessageBegin("execute", TMessageType.REPLY, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_fetchOne(self, seqid, iprot, oprot): args = fetchOne_args() args.read(iprot) iprot.readMessageEnd() result = fetchOne_result() try: result.success = self._handler.fetchOne() except HiveServerException as ex: result.ex = ex oprot.writeMessageBegin("fetchOne", TMessageType.REPLY, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_fetchN(self, seqid, iprot, oprot): args = fetchN_args() args.read(iprot) iprot.readMessageEnd() result = fetchN_result() try: result.success = self._handler.fetchN(args.numRows) except HiveServerException as ex: result.ex = ex oprot.writeMessageBegin("fetchN", TMessageType.REPLY, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_fetchAll(self, seqid, iprot, oprot): args = fetchAll_args() args.read(iprot) iprot.readMessageEnd() result = fetchAll_result() try: result.success = self._handler.fetchAll() except HiveServerException as ex: result.ex = ex oprot.writeMessageBegin("fetchAll", TMessageType.REPLY, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_getSchema(self, seqid, iprot, oprot): args = getSchema_args() args.read(iprot) iprot.readMessageEnd() result = getSchema_result() try: result.success = self._handler.getSchema() except HiveServerException as ex: result.ex = ex oprot.writeMessageBegin("getSchema", TMessageType.REPLY, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_getThriftSchema(self, seqid, iprot, oprot): args = getThriftSchema_args() args.read(iprot) iprot.readMessageEnd() result = getThriftSchema_result() try: result.success = self._handler.getThriftSchema() except HiveServerException as ex: result.ex = ex oprot.writeMessageBegin("getThriftSchema", TMessageType.REPLY, seqid) result.write(oprot) oprot.writeMessageEnd() 
class execute_args:
  """Thrift argument struct for execute().

  Attributes:
   - query
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'query', None, None, ), # 1
  )

  def __init__(self, query=None,):
    self.query = query

  def read(self, iprot):
    # Accelerated C decode path when the binary protocol + C-readable transport are available.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None
        and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic field-by-field decode.
    iprot.readStructBegin()
    while True:
      (_, field_type, field_id) = iprot.readFieldBegin()
      if field_type == TType.STOP:
        break
      if field_id == 1 and field_type == TType.STRING:
        self.query = iprot.readString()
      else:
        iprot.skip(field_type)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Accelerated C encode path when available.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None
        and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('execute_args')
    if self.query is not None:
      oprot.writeFieldBegin('query', TType.STRING, 1)
      oprot.writeString(self.query)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # NOTE: iteritems() is the Python 2 API used throughout this generated file.
    fields = ', '.join('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, fields)

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class fetchOne_args:
  """Thrift argument struct for fetchOne(); the call carries no fields."""

  thrift_spec = (
  )

  def read(self, iprot):
    # Accelerated C decode path when the binary protocol + C-readable transport are available.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None
        and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode: no declared fields, so skip anything encountered.
    iprot.readStructBegin()
    while True:
      (_, field_type, _field_id) = iprot.readFieldBegin()
      if field_type == TType.STOP:
        break
      iprot.skip(field_type)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Accelerated C encode path when available.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None
        and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('fetchOne_args')
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # NOTE: iteritems() is the Python 2 API used throughout this generated file.
    fields = ', '.join('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, fields)

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class fetchN_args:
  """Thrift argument struct for fetchN().

  Attributes:
   - numRows
  """

  thrift_spec = (
    None, # 0
    (1, TType.I32, 'numRows', None, None, ), # 1
  )

  def __init__(self, numRows=None,):
    self.numRows = numRows

  def read(self, iprot):
    # Accelerated C decode path when the binary protocol + C-readable transport are available.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None
        and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic field-by-field decode.
    iprot.readStructBegin()
    while True:
      (_, field_type, field_id) = iprot.readFieldBegin()
      if field_type == TType.STOP:
        break
      if field_id == 1 and field_type == TType.I32:
        self.numRows = iprot.readI32()
      else:
        iprot.skip(field_type)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Accelerated C encode path when available.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None
        and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('fetchN_args')
    if self.numRows is not None:
      oprot.writeFieldBegin('numRows', TType.I32, 1)
      oprot.writeI32(self.numRows)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # NOTE: iteritems() is the Python 2 API used throughout this generated file.
    fields = ', '.join('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, fields)

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class fetchAll_args:
  """Thrift argument struct for fetchAll(); the call carries no fields."""

  thrift_spec = (
  )

  def read(self, iprot):
    # Accelerated C decode path when the binary protocol + C-readable transport are available.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None
        and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode: no declared fields, so skip anything encountered.
    iprot.readStructBegin()
    while True:
      (_, field_type, _field_id) = iprot.readFieldBegin()
      if field_type == TType.STOP:
        break
      iprot.skip(field_type)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Accelerated C encode path when available.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None
        and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('fetchAll_args')
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # NOTE: iteritems() is the Python 2 API used throughout this generated file.
    fields = ', '.join('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, fields)

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
= ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class getSchema_args: thrift_spec = ( ) def read(self, iprot): if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('getSchema_args') oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class getSchema_result: """ Attributes: - success - ex """ thrift_spec = ( (0, TType.STRUCT, 'success', (hive_metastore.ttypes.Schema, hive_metastore.ttypes.Schema.thrift_spec), None, ), # 0 (1, TType.STRUCT, 'ex', (HiveServerException, HiveServerException.thrift_spec), None, ), # 1 ) def __init__(self, success=None, ex=None,): self.success = success self.ex = ex def read(self, iprot): if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not 
None and fastbinary is not None: fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.STRUCT: self.success = hive_metastore.ttypes.Schema() self.success.read(iprot) else: iprot.skip(ftype) elif fid == 1: if ftype == TType.STRUCT: self.ex = HiveServerException() self.ex.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('getSchema_result') if self.success is not None: oprot.writeFieldBegin('success', TType.STRUCT, 0) self.success.write(oprot) oprot.writeFieldEnd() if self.ex is not None: oprot.writeFieldBegin('ex', TType.STRUCT, 1) self.ex.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class getThriftSchema_args: thrift_spec = ( ) def read(self, iprot): if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if 
oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('getThriftSchema_args') oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class getThriftSchema_result: """ Attributes: - success - ex """ thrift_spec = ( (0, TType.STRUCT, 'success', (hive_metastore.ttypes.Schema, hive_metastore.ttypes.Schema.thrift_spec), None, ), # 0 (1, TType.STRUCT, 'ex', (HiveServerException, HiveServerException.thrift_spec), None, ), # 1 ) def __init__(self, success=None, ex=None,): self.success = success self.ex = ex def read(self, iprot): if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.STRUCT: self.success = hive_metastore.ttypes.Schema() self.success.read(iprot) else: iprot.skip(ftype) elif fid == 1: if ftype == TType.STRUCT: self.ex = HiveServerException() self.ex.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) return 
oprot.writeStructBegin('getThriftSchema_result') if self.success is not None: oprot.writeFieldBegin('success', TType.STRUCT, 0) self.success.write(oprot) oprot.writeFieldEnd() if self.ex is not None: oprot.writeFieldBegin('ex', TType.STRUCT, 1) self.ex.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class getClusterStatus_args: thrift_spec = ( ) def read(self, iprot): if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('getClusterStatus_args') oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class getClusterStatus_result: """ Attributes: - success - ex """ thrift_spec = ( (0, TType.STRUCT, 'success', (HiveClusterStatus, 
HiveClusterStatus.thrift_spec), None, ), # 0 (1, TType.STRUCT, 'ex', (HiveServerException, HiveServerException.thrift_spec), None, ), # 1 ) def __init__(self, success=None, ex=None,): self.success = success self.ex = ex def read(self, iprot): if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.STRUCT: self.success = HiveClusterStatus() self.success.read(iprot) else: iprot.skip(ftype) elif fid == 1: if ftype == TType.STRUCT: self.ex = HiveServerException() self.ex.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('getClusterStatus_result') if self.success is not None: oprot.writeFieldBegin('success', TType.STRUCT, 0) self.success.write(oprot) oprot.writeFieldEnd() if self.ex is not None: oprot.writeFieldBegin('ex', TType.STRUCT, 1) self.ex.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class getQueryPlan_args: thrift_spec = ( ) def read(self, iprot): if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and 
isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('getQueryPlan_args') oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class getQueryPlan_result: """ Attributes: - success - ex """ thrift_spec = ( (0, TType.STRUCT, 'success', (queryplan.ttypes.QueryPlan, queryplan.ttypes.QueryPlan.thrift_spec), None, ), # 0 (1, TType.STRUCT, 'ex', (HiveServerException, HiveServerException.thrift_spec), None, ), # 1 ) def __init__(self, success=None, ex=None,): self.success = success self.ex = ex def read(self, iprot): if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.STRUCT: self.success = queryplan.ttypes.QueryPlan() self.success.read(iprot) else: iprot.skip(ftype) elif fid == 1: if ftype == TType.STRUCT: self.ex = 
HiveServerException() self.ex.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('getQueryPlan_result') if self.success is not None: oprot.writeFieldBegin('success', TType.STRUCT, 0) self.success.write(oprot) oprot.writeFieldEnd() if self.ex is not None: oprot.writeFieldBegin('ex', TType.STRUCT, 1) self.ex.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class clean_args: thrift_spec = ( ) def read(self, iprot): if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('clean_args') oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] 
return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class clean_result: thrift_spec = ( ) def read(self, iprot): if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('clean_result') oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) import factory from time import time from decimal import Decimal as D from django.utils.timezone import now from django.db.models import get_model from oscar.core.loading import get_class from oscar_mws import MWS_MARKETPLACE_US Selector = get_class('partner.strategy', 'Selector') class UserFactory(factory.DjangoModelFactory): FACTORY_FOR = get_model('auth', 'User') first_name = 'Peter' last_name = 'Griffin' email = 'peter@petoria.pt' password = 'plaintext' class CountryFactory(factory.DjangoModelFactory): FACTORY_FOR = get_model('address', 'Country') iso_3166_1_a2 = factory.Iterator(['US', 'GB', 'DE']) 
iso_3166_1_a3 = factory.Iterator(['USA', 'GBR', 'DEU']) iso_3166_1_numeric = factory.Iterator(['840', '276', '826']) class ProductClassFactory(factory.DjangoModelFactory): FACTORY_FOR = get_model('catalogue', 'ProductClass') name = factory.Sequence(lambda n: 'Dummy product class {}'.format(n)) class BasketFactory(factory.DjangoModelFactory): FACTORY_FOR = get_model('basket', 'Basket') strategy = Selector().strategy() class AmazonProfileFactory(factory.DjangoModelFactory): FACTORY_FOR = get_model('oscar_mws', 'AmazonProfile') FACTORY_DJANGO_GET_OR_CREATE = ('product',) sku = factory.Sequence(lambda n: "sku_{}".format(str(time())[:10])) release_date = now() product = factory.SubFactory( 'oscar_mws.test.factories.ProductFactory', amazon_profile=None) class ProductFactory(factory.DjangoModelFactory): FACTORY_FOR = get_model('catalogue', 'Product') title = 'Dummy Product' product_class = factory.SubFactory(ProductClassFactory) amazon_profile = factory.RelatedFactory(AmazonProfileFactory, 'product') @factory.post_generation def stockrecord(self, created, extracted, **kwargs): if not created: return if not extracted: kwargs.setdefault('product', self) extracted = StockRecordFactory(**kwargs) self.stockrecords.add(extracted) class MerchantAccountFactory(factory.DjangoModelFactory): FACTORY_FOR = get_model('oscar_mws', 'MerchantAccount') name = "Dummy Merchant" seller_id = 'ASLLRIDHERE1J56' aws_api_key = 'FAKE_KEY' aws_api_secret = 'FAKE_SECRET' class AmazonMarketplaceFactory(factory.DjangoModelFactory): FACTORY_FOR = get_model('oscar_mws', 'AmazonMarketplace') name = "Dummy Marketplace" region = MWS_MARKETPLACE_US marketplace_id = factory.Sequence(lambda n: 'MWS_MKT_{}'.format(n)) merchant = factory.SubFactory(MerchantAccountFactory) class FeedSubmissionFactory(factory.DjangoModelFactory): FACTORY_FOR = get_model('oscar_mws', 'FeedSubmission') FACTORY_DJANGO_GET_OR_CREATE = ('submission_id',) merchant = factory.SubFactory(MerchantAccountFactory) date_submitted = now() 
class PartnerFactory(factory.DjangoModelFactory):
    """Build a fulfilment partner with a unique dummy name."""
    FACTORY_FOR = get_model('partner', 'Partner')

    name = factory.Sequence(lambda n: 'Dummy partner {}'.format(n))


class StockRecordFactory(factory.DjangoModelFactory):
    """Build a stock record tied to a (sub-factory) partner and product."""
    FACTORY_FOR = get_model('partner', 'StockRecord')

    price_excl_tax = D('12.99')
    partner = factory.SubFactory(PartnerFactory)
    # ProductFactory is defined earlier in this module.
    product = factory.SubFactory(ProductFactory)


class ShippingAddressFactory(factory.DjangoModelFactory):
    """Build a shipping address with fixed dummy US address data."""
    FACTORY_FOR = get_model('order', 'ShippingAddress')

    first_name = 'Peter'
    last_name = 'Griffin'
    line1 = '31 Spooner Street'
    line4 = 'Quahog'
    state = 'RI'
    country = factory.SubFactory(CountryFactory)
    postcode = '12345'


class OrderLineFactory(factory.DjangoModelFactory):
    """Build an order line; all price variants use the same fixed amount."""
    FACTORY_FOR = get_model('order', 'Line')

    product = factory.SubFactory(ProductFactory)
    line_price_excl_tax = D('12.99')
    line_price_incl_tax = D('12.99')
    line_price_before_discounts_incl_tax = D('12.99')
    line_price_before_discounts_excl_tax = D('12.99')


class OrderFactory(factory.DjangoModelFactory):
    """Build an order numbered from 10000 upwards on the first Site.

    NOTE(review): the ``site`` lazy attribute assumes at least one
    ``sites.Site`` row exists — confirm fixtures guarantee that.
    """
    FACTORY_FOR = get_model('order', 'Order')

    number = factory.Sequence(lambda n: "{}".format(10000 + n))
    site = factory.LazyAttribute(
        lambda a: get_model('sites', 'Site').objects.all()[0]
    )
    total_incl_tax = D('12.99')
    total_excl_tax = D('12.99')
    shipping_address = factory.SubFactory(ShippingAddressFactory)


class FulfillmentOrderFactory(factory.DjangoModelFactory):
    """Build an MWS fulfillment order linked to an order and merchant."""
    FACTORY_FOR = get_model('oscar_mws', 'FulfillmentOrder')

    fulfillment_id = 'extern_id_1154539615776'
    merchant = factory.SubFactory(MerchantAccountFactory)
    # NOTE(review): now() is evaluated once at class-definition (import)
    # time, so every instance built from this factory shares the same
    # timestamp — confirm that is intended (a lazy attribute would give a
    # fresh value per instance).
    date_updated = now()
    order = factory.SubFactory(OrderFactory)
    shipping_address = factory.SubFactory(ShippingAddressFactory)


from decimal import Decimal

from django.db.models import Q, Count
from django.utils.encoding import smart_unicode

from restlib2.http import codes
from restlib2.params import Parametizer, StrParam, BoolParam, IntParam
from modeltree.tree import MODELTREE_DEFAULT_ALIAS, trees

from avocado.events import usage
from avocado.models import DataField
from avocado.query import pipeline from avocado.stats import kmeans from .base import FieldBase MINIMUM_OBSERVATIONS = 500 MAXIMUM_OBSERVATIONS = 50000 class FieldDimsParametizer(Parametizer): aware = BoolParam(False) cluster = BoolParam(True) n = IntParam() nulls = BoolParam(False) processor = StrParam('default', choices=pipeline.query_processors) sort = StrParam() tree = StrParam(MODELTREE_DEFAULT_ALIAS, choices=trees) class FieldDimensions(FieldBase): "Field Counts Resource" parametizer = FieldDimsParametizer def get(self, request, pk): instance = self.get_object(request, pk=pk) params = self.get_params(request) tree = trees[params.get('tree')] opts = tree.root_model._meta tree_field = DataField(pk='{0}:{1}'.format(params.get('tree'), pk), app_name=opts.app_label, model_name=opts.module_name, field_name=opts.pk.name) # This will eventually make its way in the parametizer, but lists # are not supported. dimensions = request.GET.getlist('dimensions') if params['aware']: context = self.get_context(request) else: context = None QueryProcessor = pipeline.query_processors[params['processor']] processor = QueryProcessor(context=context, tree=tree) queryset = processor.get_queryset(request=request) # Explicit fields to group by, ignore ones that dont exist or the # user does not have permission to view. Default is to group by the # reference field for disinct counts. if any(dimensions): fields = [] groupby = [] for pk in dimensions: f = self.get_object(request, pk=pk) if f: fields.append(f) groupby.append(tree.query_string_for_field(f.field, model=f.model)) else: fields = [instance] groupby = [tree.query_string_for_field(instance.field, model=instance.model)] # Exclude null values. Depending on the downstream use of the data, # nulls may or may not be desirable. 
if not params['nulls']: q = Q() for field in groupby: q = q & Q(**{'{0}__isnull'.format(field): False}) queryset = queryset.filter(q) queryset = queryset.values(*groupby) # Begin constructing the response resp = { 'data': [], 'outliers': [], 'clustered': False, 'size': 0, } queryset = queryset.annotate(count=Count(tree_field.field.name))\ .values_list('count', *groupby) # Evaluate list of points length = len(queryset) # Nothing to do if not length: usage.log('dims', instance=instance, request=request, data={ 'size': 0, 'clustered': False, 'aware': params['aware'], }) return resp if length > MAXIMUM_OBSERVATIONS: data = { 'message': 'Data too large', } return self.render(request, data, status=codes.unprocessable_entity) # Apply ordering. If any of the fields are enumerable, ordering should # be relative to those fields. For continuous data, the ordering is # relative to the count of each group if (any([d.enumerable for d in fields]) and not params['sort'] == 'count'): queryset = queryset.order_by(*groupby) else: queryset = queryset.order_by('-count') clustered = False points = [{ 'count': point[0], 'values': point[1:], } for point in list(queryset)] outliers = [] # For N-dimensional continuous data, check if clustering should occur # to down-sample the data. if all([d.simple_type == 'number' for d in fields]): # Extract observations for clustering. obs = [] null_points = [] numeric_points = [] for i, point in enumerate(points): # We need to handle points that have null dimensions # differently than those that are all numeric as the kmeans # module currently cannot handle mixed type dimensions so we # only allow fully numeric points to be passed to the kmeans # module. if None in point['values']: null_points.append(point) continue for i, dim in enumerate(point['values']): if isinstance(dim, Decimal): point['values'][i] = float(str(dim)) numeric_points.append(point) obs.append(point['values']) # Perform k-means clustering. 
Determine centroids and calculate # the weighted count relatives to the centroid and observations # within the kmeans module. if params['cluster'] and length >= MINIMUM_OBSERVATIONS: clustered = True counts = [p['count'] for p in numeric_points] points, outliers = kmeans.weighted_counts( obs, counts, params['n']) else: indexes = kmeans.find_outliers(obs, normalized=False) outliers = [] for idx in indexes: outliers.append(numeric_points[idx]) numeric_points[idx] = None points = [p for p in numeric_points if p is not None] # Now that we have done the analysis using the purely numeric # points, we can add the mixed/null dimensionality points back in # to the list before returning results. points += null_points usage.log('dims', instance=instance, request=request, data={ 'size': length, 'clustered': clustered, 'aware': params['aware'], }) labeled_points = [] value_labels = tree_field.value_labels(queryset=queryset) for point in points: labeled_points.append({ 'count': point['count'], 'values': [{ 'label': value_labels.get(value, smart_unicode(value)), 'value': value } for value in point['values']] }) return { 'data': labeled_points, 'clustered': clustered, 'outliers': outliers, 'size': length, } r''' Copyright 2014 Google Inc. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
''' from collections import namedtuple import logging import socket import select import sys import ssl import time import urllib from nogotofail.mitm.connection import handlers from nogotofail.mitm.connection.handlers import preconditions from nogotofail.mitm.util import close_quietly Application = namedtuple("Application", ["package", "version"]) class Client(object): """Class representing a blame client connection. NOTE: You should avoid using this directly for client queries because if the client reconnects a new Client will be made.""" CLIENT_TIMEOUT = 21600 class Callback(object): def __init__(self, fn, timeout, now=None): self.fn = fn self.timeout = timeout self.start = now or time.time() def __init__(self, socket, server, now=None): self.socket = socket self.server = server self.info = None self.last_used = now or time.time() if isinstance(socket, ssl.SSLSocket): self._select_fn = self._ssl_handshake_select_fn else: self._select_fn = self._handshake_select_fn self.queries = {} self._txid = 0 self._buffer = "" self.address = self.socket.getpeername()[0] self.logger = logging.getLogger("nogotofail.mitm") self._handshake_completed = False @property def available(self): """Returns if the client is currently available.""" return self._handshake_completed @property def next_txid(self): """Returns the next unused transaction id for a blame request.""" id = self._txid self._txid += 1 return id def on_select(self): """Should be called when select has returned self.socket as ready for reading.""" self.last_used = time.time() return self._select_fn() def check_timeouts(self): """Returns if the connection or any of its callbacks have timed out.""" now = time.time if now - self.last_used > self.CLIENT_TIMEOUT: return False for callback in self.queries.values(): if now >= callback.start + callback.timeout and callback.timeout != 0: return False return True def close(self): """Close the connection to the client. 
This also notifies all pending queries that their request has failed.""" close_quietly(self.socket) for callback in self.queries.values(): callback.fn(False) def get_applications_async( self, client_port, server_addr, server_port, callback, timeout=10): """See Server.get_applications_async""" self.last_used = time.time() txid = self.next_txid family = socket.AF_INET6 if ":" in server_addr else socket.AF_INET message = ( unicode( "%d tcp_client_id %s %s %s\n" % (txid, client_port, socket.inet_pton(family, server_addr).encode("hex"), server_port))) try: self.socket.sendall(message) except socket.error as e: self.logger.info( "Blame: Error sending vuln_notify to %s: %s." % (self.address, e)) return False self.queries[txid] = Client.Callback( self._generate_on_get_applications_fn(callback), timeout) return True def vuln_notify_async(self, server_addr, server_port, id, type, applications, callback, timeout=10): """See Server.vuln_notify_async.""" self.last_used = time.time() txid = self.next_txid message = unicode("%d vuln_notify %s %s %s %d %s\n" % (txid, id, type, server_addr, server_port, ", ".join( ["%s %s" % (urllib.quote(app.package), app.version) for app in applications]))) try: self.socket.sendall(message) except socket.error as e: self.logger.info("AppBlame notify error for %s, %s." 
% (self.address, e)) return False self.queries[txid] = Client.Callback( self._generate_on_vuln_notify_fn(callback), timeout) return True def _generate_on_vuln_notify_fn(self, callback): def on_vuln_notify(success, data=None): if not success: callback(False) self.server.remove_client(self.address) return callback(True, data == "OK") return on_vuln_notify def _generate_on_get_applications_fn(self, callback): def on_get_applications(success, data=None): if not success: callback(False) self.server.remove_client(self.address) return platform_info = self.info.get( "Platform-Info", "Unknown") apps = data.split(",") try: callback(True, platform_info, [Application(*map(urllib.unquote, app.strip().split(" ", 1))) for app in apps]) except (ValueError, TypeError): callback(False) return on_get_applications def _ssl_handshake_select_fn(self): self.socket.setblocking(False) try: self.socket.do_handshake() except socket.error: return True self.socket.setblocking(True) self._select_fn = self._handshake_select_fn return True def _handshake_select_fn(self): """Handle client data during the handshake.""" try: data = self.socket.recv(8192) except socket.error: self.logger.info("Blame: Erorr reading from client %s.", self.address) return False if not data: self.logger.info("Blame: Client %s closed connection.", self.address) return False data = self._buffer + data lines = data.split("\n") # Check if there is still more data to be read. # Some clients send \r\n line endings and some \n, so strip extra # whitespace. 
if lines[-1].strip() != "": self._buffer = data return data = data.replace("\r", "") lines = data[:data.index("\n\n")].split("\n") try: self._parse_headers(lines) self._send_headers() except (ValueError, KeyError, IndexError, socket.error) as e: try: self.socket.sendall("400 Error parsing message\n\n") except socket.error: pass self.logger.info("Blame: Bad handshake from %s: %s" % (self.address, e)) return False # TODO: Handle any extra data after the handshake, there shouldn't be # any in the current version of the protocol. # Done! self.logger.info("Blame: New client from %s", self.address) self._select_fn = self._response_select_fn self._handshake_completed = True return True def _send_headers(self): # Send the OK self.socket.sendall("0 OK\n") # Send the configs prob = self.info.get("Attack-Probability", self.server.default_prob) self.socket.sendall("Attack-Probability: %f\n" % prob) attacks = self.info.get("Attacks", self.server.default_attacks) attacks_str = ",".join([attack.name for attack in attacks]) self.socket.sendall("Attacks: %s\n" % attacks_str) supported_str = ",".join([ attack for attack in handlers.connection.handlers.map]) self.socket.sendall("Supported-Attacks: %s\n" % supported_str) data = self.info.get("Data-Attacks", self.server.default_data) data_str = ",".join([attack.name for attack in data]) self.socket.sendall("Data-Attacks: %s\n" % data_str) supported_data = ",".join([ attack for attack in handlers.data.handlers.map]) self.socket.sendall("Supported-Data-Attacks: %s\n" % supported_data) self.socket.sendall("\n") def _parse_headers(self, lines): raw_headers = [line.split(":", 1) for line in lines[1:]] headers = {entry.strip(): header.strip() for entry, header in raw_headers} client_info = {} # Platform-Info is required, fail if not present client_info["Platform-Info"] = headers["Platform-Info"] # Everything else is optional if "Installation-ID" in headers: client_info["Installation-ID"] = headers["Installation-ID"] if "Attack-Probability" 
in headers: value = float(headers["Attack-Probability"]) if value < 0 or value > 1.0: raise ValueError("Attack-Probability outside range") client_info["Attack-Probability"] = value if "Attacks" in headers: attacks = headers["Attacks"].split(",") attacks = map(str.strip, attacks) client_info["Attacks"] = preconditions.filter_preconditions([ handlers.connection.handlers.map[attack] for attack in attacks if attack in handlers.connection.handlers.map]) if "Data-Attacks" in headers: attacks = headers["Data-Attacks"].split(",") attacks = map(str.strip, attacks) client_info["Data-Attacks"] = preconditions.filter_preconditions( [handlers.data.handlers.map[attack] for attack in attacks if attack in handlers.data.handlers.map]) # Store the raw headers as well in case a handler needs something the # client sent in an additional header client_info["headers"] = headers self.info = client_info def _response_select_fn(self): try: data = self.socket.recv(8192) except socket.error: self.logger.info("Blame: Erorr reading from client %s.", self.address) return False if not data: self.logger.info("Blame: Client %s closed connection", self.address) return False data = self._buffer + data while "\n" in data: line, rest = data.split("\n", 1) self._handle_client_line(line) data = rest self._buffer = data return True def _handle_client_line(self, line): # A response is either "id \n" or "id\n" if the command failed. 
words = line.strip().split(" ") txid = int(words[0]) data = " ".join(words[1:]) callback = self.queries.get(txid) if callback: del self.queries[txid] callback.fn(True, data) else: self.logger.debug("Blame: Response for unknown txid %d from %s", txid, self.address) class Server: """Server for managing connections to the connection blaming app on devices.""" port = None clients = None def __init__(self, port, cert, default_prob, default_attacks, default_data): self.txid = 0 self.kill = False self.port = port self.cert = cert self.default_prob = default_prob self.default_attacks = default_attacks self.default_data = default_data self.clients = {} self.fd_map = {} self.logger = logging.getLogger("nogotofail.mitm") self.server_socket = None def start_listening(self): self.server_socket = socket.socket() self.server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) self.server_socket.bind(("", self.port)) self.server_socket.listen(5) self.server_socket.settimeout(2) if self.cert: self.server_socket = ( ssl.wrap_socket( self.server_socket, certfile=self.cert, server_side=True, do_handshake_on_connect=False)) def _on_server_socket_select(self): try: (client_socket, client_address) = self.server_socket.accept() except socket.error: # In a wrapped SSL socket accept() can raise exceptions, if we get # one the client connection is broken so do nothing. 
return client_addr, client_port = client_address self.logger.debug("Blame: Connection from %s:%d", client_addr, client_port) old_client = self.clients.get(client_addr, None) if old_client: self.remove_client(client_address) self.fd_map[client_socket] = client_addr self.clients[client_addr] = Client(client_socket, self) def _on_socket_select(self, sock): if sock is self.server_socket: self._on_server_socket_select() return client_addr = self.fd_map[sock] client = self.clients[client_addr] if not client.on_select(): self.remove_client(client_addr) def client_available(self, client_addr): """Returns if the app blame client is running on client_addr. This is best effort only, it may return True for lost clients. """ return client_addr in self.clients and self.clients[client_addr].available def get_applications_async( self, client_addr, client_port, server_addr, server_port, callback, timeout=10): """Fetch the application information for a given connection tuple calling a callback when the response is received. Returns if the request was sent to the client. NOTE: If False is returned the callback will never be called. Arguments: client_addr -- the client ip address to query client_port -- the source port on the client server_addr -- the destination ip address as seen by the client server_port -- the destination port as seen by the client callback -- function to call when data is ready, should be of the form def fn(success, platform_info=None, applications=None) timeout -- timeout for the request""" if not self.client_available(client_addr): return False if not self.clients[client_addr].get_applications_async(client_port, server_addr, server_port, callback, timeout): self.remove_client(client_addr) return False return True def vuln_notify_async(self, client_addr, server_addr, server_port, id, type, applications, callback, timeout=10): """Send a notification to client_addr of a vulnerability in applications. 
Returns if the notification was sent successfully Arguments: client_addr -- Client to notify server_addr -- remote destination of the vulnerable connection server_port -- remote port of the vulnerable connection id -- An opaque blob to identify the connection later on type -- Type of vuln. See nogotofail.mitm.util.vuln.* applications -- List of Applications to blame callback -- Function to call when a response is received. Should be of the form: def callback(success, result=False) success -- If the client responded to the notification result -- If the client showed the vulnerability """ if not self.client_available(client_addr): return False result = self.clients[client_addr].vuln_notify_async(server_addr, server_port, id, type, applications, callback, timeout) if not result: self.remove_client(client_addr) return result def remove_client(self, client_addr): """Remove and close a blame client.""" if client_addr not in self.clients: return client = self.clients[client_addr] del self.clients[client_addr] del self.fd_map[client.socket] client.close() def check_timeouts(self): """Check the timeouts on all clients and remove those that have timed out.""" for client_addr in self.clients.keys(): if not self.clients[client_addr].check_timeouts(): self.logger.info("Blame: Client %s timed out", client_addr) self.remove_client(client_addr) @property def select_fds(self): """Returns the tuple of r,w,x fds to be sent to select.""" return (set([client.socket for client in self.clients.values()] + [self.server_socket]) , set(), set()) def on_select(self, r, w, x): """Called whith the results of select.select. Note that all r,w,x is a subset of the values provided by select_fds.""" for fd in set(r + w + x): self._on_socket_select(fd) def shutdown(self): """Shutdown the Blame server. 
The server should not be used after this point.""" self.server_socket.close() for client in self.clients.values(): try: client.close() except: pass """Unit tests for Superset""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import json import mock import unittest from superset import db, models, sm, security from superset.models import core as models from superset.connectors.connector_registry import ConnectorRegistry from superset.connectors.sqla.models import SqlaTable from superset.connectors.druid.models import DruidDatasource from .base_tests import SupersetTestCase ROLE_TABLES_PERM_DATA = { 'role_name': 'override_me', 'database': [{ 'datasource_type': 'table', 'name': 'main', 'schema': [{ 'name': '', 'datasources': ['birth_names'] }] }] } ROLE_ALL_PERM_DATA = { 'role_name': 'override_me', 'database': [{ 'datasource_type': 'table', 'name': 'main', 'schema': [{ 'name': '', 'datasources': ['birth_names'] }] }, { 'datasource_type': 'druid', 'name': 'druid_test', 'schema': [{ 'name': '', 'datasources': ['druid_ds_1', 'druid_ds_2'] }] } ] } EXTEND_ROLE_REQUEST = ( '/superset/approve?datasource_type={}&datasource_id={}&' 'created_by={}&role_to_extend={}') GRANT_ROLE_REQUEST = ( '/superset/approve?datasource_type={}&datasource_id={}&' 'created_by={}&role_to_grant={}') TEST_ROLE_1 = 'test_role1' TEST_ROLE_2 = 'test_role2' DB_ACCESS_ROLE = 'db_access_role' SCHEMA_ACCESS_ROLE = 'schema_access_role' def create_access_request(session, ds_type, ds_name, role_name, user_name): ds_class = ConnectorRegistry.sources[ds_type] # TODO: generalize datasource names if ds_type == 'table': ds = session.query(ds_class).filter( ds_class.table_name == ds_name).first() else: ds = session.query(ds_class).filter( ds_class.datasource_name == ds_name).first() ds_perm_view = sm.find_permission_view_menu( 'datasource_access', ds.perm) sm.add_permission_role(sm.find_role(role_name), 
ds_perm_view) access_request = models.DatasourceAccessRequest( datasource_id=ds.id, datasource_type=ds_type, created_by_fk=sm.find_user(username=user_name).id, ) session.add(access_request) session.commit() return access_request class RequestAccessTests(SupersetTestCase): requires_examples = False @classmethod def setUpClass(cls): sm.add_role('override_me') sm.add_role(TEST_ROLE_1) sm.add_role(TEST_ROLE_2) sm.add_role(DB_ACCESS_ROLE) sm.add_role(SCHEMA_ACCESS_ROLE) db.session.commit() @classmethod def tearDownClass(cls): override_me = sm.find_role('override_me') db.session.delete(override_me) db.session.delete(sm.find_role(TEST_ROLE_1)) db.session.delete(sm.find_role(TEST_ROLE_2)) db.session.delete(sm.find_role(DB_ACCESS_ROLE)) db.session.delete(sm.find_role(SCHEMA_ACCESS_ROLE)) db.session.commit() def setUp(self): self.login('admin') def tearDown(self): self.logout() override_me = sm.find_role('override_me') override_me.permissions = [] db.session.commit() db.session.close() def test_override_role_permissions_is_admin_only(self): self.logout() self.login('alpha') response = self.client.post( '/superset/override_role_permissions/', data=json.dumps(ROLE_TABLES_PERM_DATA), content_type='application/json', follow_redirects=True) self.assertNotEquals(405, response.status_code) def test_override_role_permissions_1_table(self): response = self.client.post( '/superset/override_role_permissions/', data=json.dumps(ROLE_TABLES_PERM_DATA), content_type='application/json') self.assertEquals(201, response.status_code) updated_override_me = sm.find_role('override_me') self.assertEquals(1, len(updated_override_me.permissions)) birth_names = self.get_table_by_name('birth_names') self.assertEquals( birth_names.perm, updated_override_me.permissions[0].view_menu.name) self.assertEquals( 'datasource_access', updated_override_me.permissions[0].permission.name) def test_override_role_permissions_druid_and_table(self): response = self.client.post( '/superset/override_role_permissions/', 
data=json.dumps(ROLE_ALL_PERM_DATA), content_type='application/json') self.assertEquals(201, response.status_code) updated_role = sm.find_role('override_me') perms = sorted( updated_role.permissions, key=lambda p: p.view_menu.name) druid_ds_1 = self.get_druid_ds_by_name('druid_ds_1') self.assertEquals(druid_ds_1.perm, perms[0].view_menu.name) self.assertEquals('datasource_access', perms[0].permission.name) druid_ds_2 = self.get_druid_ds_by_name('druid_ds_2') self.assertEquals(druid_ds_2.perm, perms[1].view_menu.name) self.assertEquals( 'datasource_access', updated_role.permissions[1].permission.name) birth_names = self.get_table_by_name('birth_names') self.assertEquals(birth_names.perm, perms[2].view_menu.name) self.assertEquals( 'datasource_access', updated_role.permissions[2].permission.name) self.assertEquals(3, len(perms)) def test_override_role_permissions_drops_absent_perms(self): override_me = sm.find_role('override_me') override_me.permissions.append( sm.find_permission_view_menu( view_menu_name=self.get_table_by_name('long_lat').perm, permission_name='datasource_access') ) db.session.flush() response = self.client.post( '/superset/override_role_permissions/', data=json.dumps(ROLE_TABLES_PERM_DATA), content_type='application/json') self.assertEquals(201, response.status_code) updated_override_me = sm.find_role('override_me') self.assertEquals(1, len(updated_override_me.permissions)) birth_names = self.get_table_by_name('birth_names') self.assertEquals( birth_names.perm, updated_override_me.permissions[0].view_menu.name) self.assertEquals( 'datasource_access', updated_override_me.permissions[0].permission.name) def test_clean_requests_after_role_extend(self): session = db.session # Case 1. 
Gamma and gamma2 requested test_role1 on energy_usage access # Gamma already has role test_role1 # Extend test_role1 with energy_usage access for gamma2 # Check if access request for gamma at energy_usage was deleted # gamma2 and gamma request table_role on energy usage access_request1 = create_access_request( session, 'table', 'random_time_series', TEST_ROLE_1, 'gamma2') ds_1_id = access_request1.datasource_id access_request2 = create_access_request( session, 'table', 'random_time_series', TEST_ROLE_1, 'gamma') access_requests = self.get_access_requests('gamma', 'table', ds_1_id) self.assertTrue(access_requests) # gamma gets test_role1 self.get_resp(GRANT_ROLE_REQUEST.format( 'table', ds_1_id, 'gamma', TEST_ROLE_1)) # extend test_role1 with access on energy usage self.client.get(EXTEND_ROLE_REQUEST.format( 'table', ds_1_id, 'gamma2', TEST_ROLE_1)) access_requests = self.get_access_requests('gamma', 'table', ds_1_id) self.assertFalse(access_requests) gamma_user = sm.find_user(username='gamma') gamma_user.roles.remove(sm.find_role('test_role1')) def test_clean_requests_after_alpha_grant(self): session = db.session # Case 2. 
Two access requests from gamma and gamma2 # Gamma becomes alpha, gamma2 gets granted # Check if request by gamma has been deleted access_request1 = create_access_request( session, 'table', 'birth_names', TEST_ROLE_1, 'gamma') access_request2 = create_access_request( session, 'table', 'birth_names', TEST_ROLE_2, 'gamma2') ds_1_id = access_request1.datasource_id # gamma becomes alpha alpha_role = sm.find_role('Alpha') gamma_user = sm.find_user(username='gamma') gamma_user.roles.append(alpha_role) session.commit() access_requests = self.get_access_requests('gamma', 'table', ds_1_id) self.assertTrue(access_requests) self.client.get(EXTEND_ROLE_REQUEST.format( 'table', ds_1_id, 'gamma2', TEST_ROLE_2)) access_requests = self.get_access_requests('gamma', 'table', ds_1_id) self.assertFalse(access_requests) gamma_user = sm.find_user(username='gamma') gamma_user.roles.remove(sm.find_role('Alpha')) session.commit() def test_clean_requests_after_db_grant(self): session = db.session # Case 3. Two access requests from gamma and gamma2 # Gamma gets database access, gamma2 access request granted # Check if request by gamma has been deleted gamma_user = sm.find_user(username='gamma') access_request1 = create_access_request( session, 'table', 'long_lat', TEST_ROLE_1, 'gamma') access_request2 = create_access_request( session, 'table', 'long_lat', TEST_ROLE_2, 'gamma2') ds_1_id = access_request1.datasource_id # gamma gets granted database access database = session.query(models.Database).first() security.merge_perm( sm, 'database_access', database.perm) ds_perm_view = sm.find_permission_view_menu( 'database_access', database.perm) sm.add_permission_role( sm.find_role(DB_ACCESS_ROLE) , ds_perm_view) gamma_user.roles.append(sm.find_role(DB_ACCESS_ROLE)) session.commit() access_requests = self.get_access_requests('gamma', 'table', ds_1_id) self.assertTrue(access_requests) # gamma2 request gets fulfilled self.client.get(EXTEND_ROLE_REQUEST.format( 'table', ds_1_id, 'gamma2', TEST_ROLE_2)) 
access_requests = self.get_access_requests('gamma', 'table', ds_1_id) self.assertFalse(access_requests) gamma_user = sm.find_user(username='gamma') gamma_user.roles.remove(sm.find_role(DB_ACCESS_ROLE)) session.commit() def test_clean_requests_after_schema_grant(self): session = db.session # Case 4. Two access requests from gamma and gamma2 # Gamma gets schema access, gamma2 access request granted # Check if request by gamma has been deleted gamma_user = sm.find_user(username='gamma') access_request1 = create_access_request( session, 'table', 'wb_health_population', TEST_ROLE_1, 'gamma') access_request2 = create_access_request( session, 'table', 'wb_health_population', TEST_ROLE_2, 'gamma2') ds_1_id = access_request1.datasource_id ds = session.query(SqlaTable).filter_by( table_name='wb_health_population').first() ds.schema = 'temp_schema' security.merge_perm( sm, 'schema_access', ds.schema_perm) schema_perm_view = sm.find_permission_view_menu( 'schema_access', ds.schema_perm) sm.add_permission_role( sm.find_role(SCHEMA_ACCESS_ROLE) , schema_perm_view) gamma_user.roles.append(sm.find_role(SCHEMA_ACCESS_ROLE)) session.commit() # gamma2 request gets fulfilled self.client.get(EXTEND_ROLE_REQUEST.format( 'table', ds_1_id, 'gamma2', TEST_ROLE_2)) access_requests = self.get_access_requests('gamma', 'table', ds_1_id) self.assertFalse(access_requests) gamma_user = sm.find_user(username='gamma') gamma_user.roles.remove(sm.find_role(SCHEMA_ACCESS_ROLE)) ds = session.query(SqlaTable).filter_by( table_name='wb_health_population').first() ds.schema = None session.commit() @mock.patch('superset.utils.send_MIME_email') def test_approve(self, mock_send_mime): session = db.session TEST_ROLE_NAME = 'table_role' sm.add_role(TEST_ROLE_NAME) # Case 1. Grant new role to the user. 
access_request1 = create_access_request( session, 'table', 'unicode_test', TEST_ROLE_NAME, 'gamma') ds_1_id = access_request1.datasource_id resp = self.get_resp(GRANT_ROLE_REQUEST.format( 'table', ds_1_id, 'gamma', TEST_ROLE_NAME)) # Test email content. self.assertTrue(mock_send_mime.called) call_args = mock_send_mime.call_args[0] self.assertEqual([sm.find_user(username='gamma').email, sm.find_user(username='admin').email], call_args[1]) self.assertEqual( '[Superset] Access to the datasource {} was granted'.format( self.get_table(ds_1_id).full_name), call_args[2]['Subject']) self.assertIn(TEST_ROLE_NAME, call_args[2].as_string()) self.assertIn('unicode_test', call_args[2].as_string()) access_requests = self.get_access_requests('gamma', 'table', ds_1_id) # request was removed self.assertFalse(access_requests) # user was granted table_role user_roles = [r.name for r in sm.find_user('gamma').roles] self.assertIn(TEST_ROLE_NAME, user_roles) # Case 2. Extend the role to have access to the table access_request2 = create_access_request( session, 'table', 'long_lat', TEST_ROLE_NAME, 'gamma') ds_2_id = access_request2.datasource_id long_lat_perm = access_request2.datasource.perm self.client.get(EXTEND_ROLE_REQUEST.format( 'table', access_request2.datasource_id, 'gamma', TEST_ROLE_NAME)) access_requests = self.get_access_requests('gamma', 'table', ds_2_id) # Test email content. 
self.assertTrue(mock_send_mime.called) call_args = mock_send_mime.call_args[0] self.assertEqual([sm.find_user(username='gamma').email, sm.find_user(username='admin').email], call_args[1]) self.assertEqual( '[Superset] Access to the datasource {} was granted'.format( self.get_table(ds_2_id).full_name), call_args[2]['Subject']) self.assertIn(TEST_ROLE_NAME, call_args[2].as_string()) self.assertIn('long_lat', call_args[2].as_string()) # request was removed self.assertFalse(access_requests) # table_role was extended to grant access to the long_lat table/ perm_view = sm.find_permission_view_menu( 'datasource_access', long_lat_perm) TEST_ROLE = sm.find_role(TEST_ROLE_NAME) self.assertIn(perm_view, TEST_ROLE.permissions) # Case 3. Grant new role to the user to access the druid datasource. sm.add_role('druid_role') access_request3 = create_access_request( session, 'druid', 'druid_ds_1', 'druid_role', 'gamma') self.get_resp(GRANT_ROLE_REQUEST.format( 'druid', access_request3.datasource_id, 'gamma', 'druid_role')) # user was granted table_role user_roles = [r.name for r in sm.find_user('gamma').roles] self.assertIn('druid_role', user_roles) # Case 4. 
Extend the role to have access to the druid datasource access_request4 = create_access_request( session, 'druid', 'druid_ds_2', 'druid_role', 'gamma') druid_ds_2_perm = access_request4.datasource.perm self.client.get(EXTEND_ROLE_REQUEST.format( 'druid', access_request4.datasource_id, 'gamma', 'druid_role')) # druid_role was extended to grant access to the druid_access_ds_2 druid_role = sm.find_role('druid_role') perm_view = sm.find_permission_view_menu( 'datasource_access', druid_ds_2_perm) self.assertIn(perm_view, druid_role.permissions) # cleanup gamma_user = sm.find_user(username='gamma') gamma_user.roles.remove(sm.find_role('druid_role')) gamma_user.roles.remove(sm.find_role(TEST_ROLE_NAME)) session.delete(sm.find_role('druid_role')) session.delete(sm.find_role(TEST_ROLE_NAME)) session.commit() def test_request_access(self): session = db.session self.logout() self.login(username='gamma') gamma_user = sm.find_user(username='gamma') sm.add_role('dummy_role') gamma_user.roles.append(sm.find_role('dummy_role')) session.commit() ACCESS_REQUEST = ( '/superset/request_access?' 'datasource_type={}&' 'datasource_id={}&' 'action={}&') ROLE_EXTEND_LINK = ( 'Extend {} Role') ROLE_GRANT_LINK = ( 'Grant {} Role') # Request table access, there are no roles have this table. table1 = session.query(SqlaTable).filter_by( table_name='random_time_series').first() table_1_id = table1.id # request access to the table resp = self.get_resp( ACCESS_REQUEST.format('table', table_1_id, 'go')) assert "Access was requested" in resp access_request1 = self.get_access_requests('gamma', 'table', table_1_id) assert access_request1 is not None # Request access, roles exist that contains the table. 
# add table to the existing roles table3 = session.query(SqlaTable).filter_by( table_name='energy_usage').first() table_3_id = table3.id table3_perm = table3.perm sm.add_role('energy_usage_role') alpha_role = sm.find_role('Alpha') sm.add_permission_role( alpha_role, sm.find_permission_view_menu('datasource_access', table3_perm)) sm.add_permission_role( sm.find_role("energy_usage_role"), sm.find_permission_view_menu('datasource_access', table3_perm)) session.commit() self.get_resp( ACCESS_REQUEST.format('table', table_3_id, 'go')) access_request3 = self.get_access_requests('gamma', 'table', table_3_id) approve_link_3 = ROLE_GRANT_LINK.format( 'table', table_3_id, 'gamma', 'energy_usage_role', 'energy_usage_role') self.assertEqual(access_request3.roles_with_datasource, '
  • {}
'.format(approve_link_3)) # Request druid access, there are no roles have this table. druid_ds_4 = session.query(DruidDatasource).filter_by( datasource_name='druid_ds_1').first() druid_ds_4_id = druid_ds_4.id # request access to the table self.get_resp(ACCESS_REQUEST.format('druid', druid_ds_4_id, 'go')) access_request4 = self.get_access_requests('gamma', 'druid', druid_ds_4_id) self.assertEqual( access_request4.roles_with_datasource, '
    '.format(access_request4.id)) # Case 5. Roles exist that contains the druid datasource. # add druid ds to the existing roles druid_ds_5 = session.query(DruidDatasource).filter_by( datasource_name='druid_ds_2').first() druid_ds_5_id = druid_ds_5.id druid_ds_5_perm = druid_ds_5.perm druid_ds_2_role = sm.add_role('druid_ds_2_role') admin_role = sm.find_role('Admin') sm.add_permission_role( admin_role, sm.find_permission_view_menu('datasource_access', druid_ds_5_perm)) sm.add_permission_role( druid_ds_2_role, sm.find_permission_view_menu('datasource_access', druid_ds_5_perm)) session.commit() self.get_resp(ACCESS_REQUEST.format('druid', druid_ds_5_id, 'go')) access_request5 = self.get_access_requests( 'gamma', 'druid', druid_ds_5_id) approve_link_5 = ROLE_GRANT_LINK.format( 'druid', druid_ds_5_id, 'gamma', 'druid_ds_2_role', 'druid_ds_2_role') self.assertEqual(access_request5.roles_with_datasource, '
    • {}
    '.format(approve_link_5)) # cleanup gamma_user = sm.find_user(username='gamma') gamma_user.roles.remove(sm.find_role('dummy_role')) session.commit() def test_update_role_do_not_exist(self): update_role_str = 'update_me' update_role = sm.find_role(update_role_str) if update_role: db.session.delete(update_role) db.session.commit() data = json.dumps({ 'users': [{ 'username': 'gamma', 'first_name': 'Gamma', 'last_name': 'Gamma', 'email': 'gamma@superset.com', }], 'role_name': update_role_str}) r = self.client.post('/superset/update_role/', data=data, follow_redirects=True) self.assertEquals(500, r.status_code) def test_update_role(self): update_role_str = 'update_me' sm.add_role(update_role_str) db.session.commit() resp = self.client.post( '/superset/update_role/', data=json.dumps({ 'users': [{ 'username': 'gamma', 'first_name': 'Gamma', 'last_name': 'Gamma', 'email': 'gamma@superset.com' }], 'role_name': update_role_str }), follow_redirects=True ) update_role = sm.find_role(update_role_str) self.assertEquals( update_role.user, [sm.find_user(username='gamma')]) self.assertEquals(resp.status_code, 201) resp = self.client.post( '/superset/update_role/', data=json.dumps({ 'users': [{ 'username': 'alpha', 'first_name': 'Alpha', 'last_name': 'Alpha', 'email': 'alpha@superset.com' }, { 'username': 'unknown', 'first_name': 'Unknown1', 'last_name': 'Unknown2', 'email': 'unknown@superset.com' }], 'role_name': update_role_str }), follow_redirects=True ) self.assertEquals(resp.status_code, 201) update_role = sm.find_role(update_role_str) self.assertEquals( update_role.user, [ sm.find_user(username='alpha'), sm.find_user(username='unknown'), ]) unknown = sm.find_user(username='unknown') self.assertEquals('Unknown2', unknown.last_name) self.assertEquals('Unknown1', unknown.first_name) self.assertEquals('unknown@superset.com', unknown.email) db.session.delete(update_role) db.session.delete(unknown) db.session.commit() if __name__ == '__main__': unittest.main() """Float tests 
Made for Jython. """ import math import sys import unittest from test import test_support jython = test_support.is_jython class FloatTestCase(unittest.TestCase): def test_float_repr(self): self.assertEqual(repr(12345678.000000005), '12345678.000000006') self.assertEqual(repr(12345678.0000000005), '12345678.0') self.assertRegexpMatches(repr(math.pi**-100), '1.927581416056020[0-9]e-50') self.assertEqual(repr(-1.0), '-1.0') self.assertEqual(repr(-9876.543210), '-9876.54321') self.assertEqual(repr(0.123456789e+35), '1.23456789e+34') def test_float_repr2(self): # Quite possibly these divergences result from JDK bug JDK-4511638: self.assertEqual(repr(9876.543210e+15), jython and '9.876543209999999e+18' or '9.87654321e+18') self.assertEqual(repr(1235235235235240000.0), jython and '1.2352352352352399e+18' or '1.23523523523524e+18') def test_float_str(self): self.assertEqual(str(12345678.000005), '12345678.0') self.assertEqual(str(12345678.00005), '12345678.0001') self.assertEqual(str(12345678.00005), '12345678.0001') self.assertEqual(str(12345678.0005), '12345678.0005') self.assertEqual(str(math.pi**-100), '1.92758141606e-50') self.assertEqual(str(0.0), '0.0') self.assertEqual(str(-1.0), '-1.0') self.assertEqual(str(-9876.543210), '-9876.54321') self.assertEqual(str(23456789012E666), 'inf') self.assertEqual(str(-23456789012E666), '-inf') def test_float_str_formatting(self): self.assertEqual('%.13g' % 12345678.00005, '12345678.00005') self.assertEqual('%.12g' % 12345678.00005, '12345678.0001') self.assertEqual('%.11g' % 12345678.00005, '12345678') self.assertEqual('%.12g' % math.pi**-100, '1.92758141606e-50') self.assertEqual('%.5g' % 123.005, '123') self.assertEqual('%#.5g' % 123.005, '123.00') self.assertEqual('%#g' % 0.001, '0.00100000') self.assertEqual('%#.5g' % 0.001, '0.0010000') self.assertEqual('%#.1g' % 0.0001, '0.0001') self.assertEqual('%#.4g' % 100, '100.0') self.assertEqual('%#.4g' % 100.25, '100.2') self.assertEqual('%g' % 0.00001, '1e-05') 
self.assertEqual('%#g' % 0.00001, '1.00000e-05') self.assertEqual('%e' % -400.0, '-4.000000e+02') self.assertEqual('%.2g' % 99, '99') self.assertEqual('%.2g' % 100, '1e+02') def test_overflow(self): shuge = '12345' * 120 shuge_float = float(shuge) shuge_int = int(shuge) self.assertRaises(OverflowError, float, shuge_int) self.assertRaises(OverflowError, int, shuge_float) # and cmp should not overflow self.assertNotEqual(0.1, shuge_int) def test_nan(self): nan = float('nan') self.assert_(type(nan), float) if jython: # support Java syntax self.assert_(type(float('NaN')), float) self.assertNotEqual(nan, float('nan')) self.assertNotEqual(nan, nan) self.assertEqual(cmp(nan, float('nan')), 1) self.assertEqual(cmp(nan, nan), 0) for i in (-1, 1, -1.0, 1.0): self.assertEqual(cmp(nan, i), -1) self.assertEqual(cmp(i, nan), 1) def test_infinity(self): self.assert_(type(float('Infinity')), float) self.assert_(type(float('inf')), float) self.assertRaises(OverflowError, long, float('Infinity')) def test_minus_zero(self): # Some operations confused by -0.0 mz = float('-0.0') self.assertEquals(mz, 0.) self.assertEquals(repr(mz)[0], '-') self.assertEquals(repr(abs(mz))[0], '0') def test_float_none(self): self.assertRaises(TypeError, float, None) def test_pow(self): class Foo(object): def __rpow__(self, other): return other ** 2 self.assertEqual(4.0 ** Foo(), 16.0) # regression in 2.5 alphas self.assertEqual((4.0).__pow__(2, None), 16.0) def test_faux(self): class F(object): def __float__(self): return 1.6 self.assertEqual(math.cos(1.6), math.cos(F())) def test_main(): test_support.run_unittest(FloatTestCase) if __name__ == '__main__': test_main() """ Here is probably the place to write the docs, since the test-cases show how the type behave. Later... 
""" from ctypes import * from ctypes.test import need_symbol import sys, unittest try: WINFUNCTYPE except NameError: # fake to enable this test on Linux WINFUNCTYPE = CFUNCTYPE import _ctypes_test dll = CDLL(_ctypes_test.__file__) if sys.platform == "win32": windll = WinDLL(_ctypes_test.__file__) class POINT(Structure): _fields_ = [("x", c_int), ("y", c_int)] class RECT(Structure): _fields_ = [("left", c_int), ("top", c_int), ("right", c_int), ("bottom", c_int)] class FunctionTestCase(unittest.TestCase): def test_mro(self): # in Python 2.3, this raises TypeError: MRO conflict among bases classes, # in Python 2.2 it works. # # But in early versions of _ctypes.c, the result of tp_new # wasn't checked, and it even crashed Python. # Found by Greg Chapman. try: class X(object, Array): _length_ = 5 _type_ = "i" except TypeError: pass from _ctypes import _Pointer try: class X(object, _Pointer): pass except TypeError: pass from _ctypes import _SimpleCData try: class X(object, _SimpleCData): _type_ = "i" except TypeError: pass try: class X(object, Structure): _fields_ = [] except TypeError: pass @need_symbol('c_wchar') def test_wchar_parm(self): f = dll._testfunc_i_bhilfd f.argtypes = [c_byte, c_wchar, c_int, c_long, c_float, c_double] result = f(1, u"x", 3, 4, 5.0, 6.0) self.assertEqual(result, 139) self.assertEqual(type(result), int) @need_symbol('c_wchar') def test_wchar_result(self): f = dll._testfunc_i_bhilfd f.argtypes = [c_byte, c_short, c_int, c_long, c_float, c_double] f.restype = c_wchar result = f(0, 0, 0, 0, 0, 0) self.assertEqual(result, u'\x00') def test_voidresult(self): f = dll._testfunc_v f.restype = None f.argtypes = [c_int, c_int, POINTER(c_int)] result = c_int() self.assertEqual(None, f(1, 2, byref(result))) self.assertEqual(result.value, 3) def test_intresult(self): f = dll._testfunc_i_bhilfd f.argtypes = [c_byte, c_short, c_int, c_long, c_float, c_double] f.restype = c_int result = f(1, 2, 3, 4, 5.0, 6.0) self.assertEqual(result, 21) 
        self.assertEqual(type(result), int)

        result = f(-1, -2, -3, -4, -5.0, -6.0)
        self.assertEqual(result, -21)
        self.assertEqual(type(result), int)

        # If we declare the function to return a short,
        # is the high part split off?
        f.restype = c_short
        result = f(1, 2, 3, 4, 5.0, 6.0)
        self.assertEqual(result, 21)
        self.assertEqual(type(result), int)

        result = f(1, 2, 3, 0x10004, 5.0, 6.0)
        self.assertEqual(result, 21)
        self.assertEqual(type(result), int)

        # You cannot assign character format codes as restype any longer
        self.assertRaises(TypeError, setattr, f, "restype", "i")

    def test_floatresult(self):
        # Same summing function, float return type.
        f = dll._testfunc_f_bhilfd
        f.argtypes = [c_byte, c_short, c_int, c_long, c_float, c_double]
        f.restype = c_float
        result = f(1, 2, 3, 4, 5.0, 6.0)
        self.assertEqual(result, 21)
        self.assertEqual(type(result), float)

        result = f(-1, -2, -3, -4, -5.0, -6.0)
        self.assertEqual(result, -21)
        self.assertEqual(type(result), float)

    def test_doubleresult(self):
        f = dll._testfunc_d_bhilfd
        f.argtypes = [c_byte, c_short, c_int, c_long, c_float, c_double]
        f.restype = c_double
        result = f(1, 2, 3, 4, 5.0, 6.0)
        self.assertEqual(result, 21)
        self.assertEqual(type(result), float)

        result = f(-1, -2, -3, -4, -5.0, -6.0)
        self.assertEqual(result, -21)
        self.assertEqual(type(result), float)

    def test_longdoubleresult(self):
        f = dll._testfunc_D_bhilfD
        f.argtypes = [c_byte, c_short, c_int, c_long, c_float, c_longdouble]
        f.restype = c_longdouble
        result = f(1, 2, 3, 4, 5.0, 6.0)
        self.assertEqual(result, 21)
        self.assertEqual(type(result), float)

        result = f(-1, -2, -3, -4, -5.0, -6.0)
        self.assertEqual(result, -21)
        self.assertEqual(type(result), float)

    @need_symbol('c_longlong')
    def test_longlongresult(self):
        f = dll._testfunc_q_bhilfd
        f.restype = c_longlong
        f.argtypes = [c_byte, c_short, c_int, c_long, c_float, c_double]
        result = f(1, 2, 3, 4, 5.0, 6.0)
        self.assertEqual(result, 21)

        # Variant that also takes a long long argument.
        f = dll._testfunc_q_bhilfdq
        f.restype = c_longlong
        f.argtypes = [c_byte, c_short, c_int, c_long, c_float,
                      c_double, c_longlong]
        result = f(1, 2, 3, 4, 5.0, 6.0, 21)
        self.assertEqual(result, 42)

    def test_stringresult(self):
        # char* identity function: returns its argument.
        f = dll._testfunc_p_p
        f.argtypes = None
        f.restype = c_char_p
        result = f("123")
        self.assertEqual(result, "123")

        result = f(None)
        self.assertEqual(result, None)

    def test_pointers(self):
        f = dll._testfunc_p_p
        f.restype = POINTER(c_int)
        f.argtypes = [POINTER(c_int)]

        # This only works if the value c_int(42) passed to the
        # function is still alive while the pointer (the result) is
        # used.
        v = c_int(42)

        self.assertEqual(pointer(v).contents.value, 42)
        result = f(pointer(v))
        self.assertEqual(type(result), POINTER(c_int))
        self.assertEqual(result.contents.value, 42)

        # This one works...
        result = f(pointer(v))
        self.assertEqual(result.contents.value, v.value)

        p = pointer(c_int(99))
        result = f(p)
        self.assertEqual(result.contents.value, 99)

        arg = byref(v)
        result = f(arg)
        # byref() objects are not dereferenceable like pointers.
        self.assertNotEqual(result.contents, v.value)

        self.assertRaises(ArgumentError, f, byref(c_short(22)))

        # It is dangerous, however, because you don't control the lifetime
        # of the pointer:
        result = f(byref(c_int(99)))
        self.assertNotEqual(result.contents, 99)

    def test_errors(self):
        f = dll._testfunc_p_p
        f.restype = c_int

        class X(Structure):
            _fields_ = [("y", c_int)]

        self.assertRaises(TypeError, f, X())  # cannot convert parameter

    ################################################################
    def test_shorts(self):
        # The C function calls the callback once per power of two up
        # to its first argument; collect and compare the values.
        f = dll._testfunc_callback_i_if

        args = []
        expected = [262144, 131072, 65536, 32768, 16384, 8192, 4096, 2048,
                    1024, 512, 256, 128, 64, 32, 16, 8, 4, 2, 1]

        def callback(v):
            args.append(v)
            return v

        CallBack = CFUNCTYPE(c_int, c_int)
        cb = CallBack(callback)
        f(2**18, cb)
        self.assertEqual(args, expected)

    ################################################################
    def test_callbacks(self):
        f = dll._testfunc_callback_i_if
        f.restype = c_int
        f.argtypes = None

        MyCallback = CFUNCTYPE(c_int, c_int)

        def callback(value):
            # print "called back with", value
            return value

        cb = MyCallback(callback)
        result = f(-10, cb)
        self.assertEqual(result, -18)
# test with prototype f.argtypes = [c_int, MyCallback] cb = MyCallback(callback) result = f(-10, cb) self.assertEqual(result, -18) AnotherCallback = WINFUNCTYPE(c_int, c_int, c_int, c_int, c_int) # check that the prototype works: we call f with wrong # argument types cb = AnotherCallback(callback) self.assertRaises(ArgumentError, f, -10, cb) def test_callbacks_2(self): # Can also use simple datatypes as argument type specifiers # for the callback function. # In this case the call receives an instance of that type f = dll._testfunc_callback_i_if f.restype = c_int MyCallback = CFUNCTYPE(c_int, c_int) f.argtypes = [c_int, MyCallback] def callback(value): #print "called back with", value self.assertEqual(type(value), int) return value cb = MyCallback(callback) result = f(-10, cb) self.assertEqual(result, -18) @need_symbol('c_longlong') def test_longlong_callbacks(self): f = dll._testfunc_callback_q_qf f.restype = c_longlong MyCallback = CFUNCTYPE(c_longlong, c_longlong) f.argtypes = [c_longlong, MyCallback] def callback(value): self.assertIsInstance(value, (int, long)) return value & 0x7FFFFFFF cb = MyCallback(callback) self.assertEqual(13577625587, f(1000000000000, cb)) def test_errors(self): self.assertRaises(AttributeError, getattr, dll, "_xxx_yyy") self.assertRaises(ValueError, c_int.in_dll, dll, "_xxx_yyy") def test_byval(self): # without prototype ptin = POINT(1, 2) ptout = POINT() # EXPORT int _testfunc_byval(point in, point *pout) result = dll._testfunc_byval(ptin, byref(ptout)) got = result, ptout.x, ptout.y expected = 3, 1, 2 self.assertEqual(got, expected) # with prototype ptin = POINT(101, 102) ptout = POINT() dll._testfunc_byval.argtypes = (POINT, POINTER(POINT)) dll._testfunc_byval.restype = c_int result = dll._testfunc_byval(ptin, byref(ptout)) got = result, ptout.x, ptout.y expected = 203, 101, 102 self.assertEqual(got, expected) def test_struct_return_2H(self): class S2H(Structure): _fields_ = [("x", c_short), ("y", c_short)] dll.ret_2h_func.restype = 
S2H dll.ret_2h_func.argtypes = [S2H] inp = S2H(99, 88) s2h = dll.ret_2h_func(inp) self.assertEqual((s2h.x, s2h.y), (99*2, 88*3)) @unittest.skipUnless(sys.platform == "win32", 'Windows-specific test') def test_struct_return_2H_stdcall(self): class S2H(Structure): _fields_ = [("x", c_short), ("y", c_short)] windll.s_ret_2h_func.restype = S2H windll.s_ret_2h_func.argtypes = [S2H] s2h = windll.s_ret_2h_func(S2H(99, 88)) self.assertEqual((s2h.x, s2h.y), (99*2, 88*3)) def test_struct_return_8H(self): class S8I(Structure): _fields_ = [("a", c_int), ("b", c_int), ("c", c_int), ("d", c_int), ("e", c_int), ("f", c_int), ("g", c_int), ("h", c_int)] dll.ret_8i_func.restype = S8I dll.ret_8i_func.argtypes = [S8I] inp = S8I(9, 8, 7, 6, 5, 4, 3, 2) s8i = dll.ret_8i_func(inp) self.assertEqual((s8i.a, s8i.b, s8i.c, s8i.d, s8i.e, s8i.f, s8i.g, s8i.h), (9*2, 8*3, 7*4, 6*5, 5*6, 4*7, 3*8, 2*9)) @unittest.skipUnless(sys.platform == "win32", 'Windows-specific test') def test_struct_return_8H_stdcall(self): class S8I(Structure): _fields_ = [("a", c_int), ("b", c_int), ("c", c_int), ("d", c_int), ("e", c_int), ("f", c_int), ("g", c_int), ("h", c_int)] windll.s_ret_8i_func.restype = S8I windll.s_ret_8i_func.argtypes = [S8I] inp = S8I(9, 8, 7, 6, 5, 4, 3, 2) s8i = windll.s_ret_8i_func(inp) self.assertEqual( (s8i.a, s8i.b, s8i.c, s8i.d, s8i.e, s8i.f, s8i.g, s8i.h), (9*2, 8*3, 7*4, 6*5, 5*6, 4*7, 3*8, 2*9)) def test_sf1651235(self): # see http://www.python.org/sf/1651235 proto = CFUNCTYPE(c_int, RECT, POINT) def callback(*args): return 0 callback = proto(callback) self.assertRaises(ArgumentError, lambda: callback((1, 2, 3, 4), POINT())) if __name__ == '__main__': unittest.main() ######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. 
All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # KOI8-R language model # Character Mapping Table: KOI8R_CharToOrderMap = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, # 80 207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, # 90 223,224,225, 68,226,227,228,229,230,231,232,233,234,235,236,237, # a0 238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253, # b0 27, 3, 21, 28, 13, 2, 39, 19, 26, 4, 23, 11, 8, 12, 5, 1, # c0 15, 16, 9, 7, 6, 14, 24, 10, 17, 18, 20, 25, 30, 29, 22, 54, # d0 59, 37, 44, 58, 41, 48, 53, 46, 55, 42, 60, 36, 49, 38, 31, 34, # e0 35, 43, 45, 32, 
40, 52, 56, 33, 61, 62, 51, 57, 47, 63, 50, 70, # f0 ) win1251_CharToOrderMap = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, 207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, 223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, 239,240,241,242,243,244,245,246, 68,247,248,249,250,251,252,253, 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, ) latin5_CharToOrderMap = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, 207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, 223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, 45, 
32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, 239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, ) macCyrillic_CharToOrderMap = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, 191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, 207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, 223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, 239,240,241,242,243,244,245,246,247,248,249,250,251,252, 68, 16, 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27,255, ) IBM855_CharToOrderMap = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 191,192,193,194, 68,195,196,197,198,199,200,201,202,203,204,205, 
206,207,208,209,210,211,212,213,214,215,216,217, 27, 59, 54, 70, 3, 37, 21, 44, 28, 58, 13, 41, 2, 48, 39, 53, 19, 46,218,219, 220,221,222,223,224, 26, 55, 4, 42,225,226,227,228, 23, 60,229, 230,231,232,233,234,235, 11, 36,236,237,238,239,240,241,242,243, 8, 49, 12, 38, 5, 31, 1, 34, 15,244,245,246,247, 35, 16,248, 43, 9, 45, 7, 32, 6, 40, 14, 52, 24, 56, 10, 33, 17, 61,249, 250, 18, 62, 20, 51, 25, 57, 30, 47, 29, 63, 22, 50,251,252,255, ) IBM866_CharToOrderMap = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, 191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, 207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, 223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, 239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, ) # Model Table: # total sequences: 100% # first 512 sequences: 97.6601% # first 1024 sequences: 2.3389% # rest sequences: 0.1237% # negative sequences: 0.0009% RussianLangModel = ( 0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,1,3,3,3,3,1,3,3,3,2,3,2,3,3, 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,2,2,2,2,2,0,0,2, 3,3,3,2,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,2,3,2,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
3,3,3,2,2,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,2,3,3,1,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,2,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, 0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, 0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,2,2,2,3,1,3,3,1,3,3,3,3,2,2,3,0,2,2,2,3,3,2,1,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,2,3,3,3,3,3,2,2,3,2,3,3,3,2,1,2,2,0,1,2,2,2,2,2,2,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,3,0,2,2,3,3,2,1,2,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,2,3,3,1,2,3,2,2,3,2,3,3,3,3,2,2,3,0,3,2,2,3,1,1,1,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,3,3,3,3,2,2,2,0,3,3,3,2,2,2,2,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,2,3,2,2,0,1,3,2,1,2,2,1,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,3,3,3,2,1,1,3,0,1,1,1,1,2,1,1,0,2,2,2,1,2,0,1,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,2,3,3,2,2,2,2,1,3,2,3,2,3,2,1,2,2,0,1,1,2,1,2,1,2,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,2,3,3,3,2,2,2,2,0,2,2,2,2,3,1,1,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, 3,2,3,2,2,3,3,3,3,3,3,3,3,3,1,3,2,0,0,3,3,3,3,2,3,3,3,3,2,3,2,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,3,3,3,3,3,2,2,3,3,0,2,1,0,3,2,3,2,3,0,0,1,2,0,0,1,0,1,2,1,1,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,0,3,0,2,3,3,3,3,2,3,3,3,3,1,2,2,0,0,2,3,2,2,2,3,2,3,2,2,3,0,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
3,2,3,0,2,3,2,3,0,1,2,3,3,2,0,2,3,0,0,2,3,2,2,0,1,3,1,3,2,2,1,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,1,3,0,2,3,3,3,3,3,3,3,3,2,1,3,2,0,0,2,2,3,3,3,2,3,3,0,2,2,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,2,2,3,3,2,2,2,3,3,0,0,1,1,1,1,1,2,0,0,1,1,1,1,0,1,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,2,2,3,3,3,3,3,3,3,0,3,2,3,3,2,3,2,0,2,1,0,1,1,0,1,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,2,3,3,3,2,2,2,2,3,1,3,2,3,1,1,2,1,0,2,2,2,2,1,3,1,0, 0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, 2,2,3,3,3,3,3,1,2,2,1,3,1,0,3,0,0,3,0,0,0,1,1,0,1,2,1,0,0,0,0,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,2,2,1,1,3,3,3,2,2,1,2,2,3,1,1,2,0,0,2,2,1,3,0,0,2,1,1,2,1,1,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,2,3,3,3,3,1,2,2,2,1,2,1,3,3,1,1,2,1,2,1,2,2,0,2,0,0,1,1,0,1,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,3,3,3,3,3,2,1,3,2,2,3,2,0,3,2,0,3,0,1,0,1,1,0,0,1,1,1,1,0,1,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,2,3,3,3,2,2,2,3,3,1,2,1,2,1,0,1,0,1,1,0,1,0,0,2,1,1,1,0,1,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, 3,1,1,2,1,2,3,3,2,2,1,2,2,3,0,2,1,0,0,2,2,3,2,1,2,2,2,2,2,3,1,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,1,1,0,1,1,2,2,1,1,3,0,0,1,3,1,1,1,0,0,0,1,0,1,1,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,1,3,3,3,2,0,0,0,2,1,0,1,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,0,1,0,0,2,3,2,2,2,1,2,2,2,1,2,1,0,0,1,1,1,0,2,0,1,1,1,0,0,1,1, 1,0,0,0,0,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, 2,3,3,3,3,0,0,0,0,1,0,0,0,0,3,0,1,2,1,0,0,0,0,0,0,0,1,1,0,0,1,1, 1,0,1,0,1,2,0,0,1,1,2,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0, 
2,2,3,2,2,2,3,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,0,1,0,1,1,1,0,2,1, 1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,0, 3,3,3,2,2,2,2,3,2,2,1,1,2,2,2,2,1,1,3,1,2,1,2,0,0,1,1,0,1,0,2,1, 1,1,1,1,1,2,1,0,1,1,1,1,0,1,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,1,1,0, 2,0,0,1,0,3,2,2,2,2,1,2,1,2,1,2,0,0,0,2,1,2,2,1,1,2,2,0,1,1,0,2, 1,1,1,1,1,0,1,1,1,2,1,1,1,2,1,0,1,2,1,1,1,1,0,1,1,1,0,0,1,0,0,1, 1,3,2,2,2,1,1,1,2,3,0,0,0,0,2,0,2,2,1,0,0,0,0,0,0,1,0,0,0,0,1,1, 1,0,1,1,0,1,0,1,1,0,1,1,0,2,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, 2,3,2,3,2,1,2,2,2,2,1,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,2,1, 1,1,2,1,0,2,0,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0, 3,0,0,1,0,2,2,2,3,2,2,2,2,2,2,2,0,0,0,2,1,2,1,1,1,2,2,0,0,0,1,2, 1,1,1,1,1,0,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0,0,1, 2,3,2,3,3,2,0,1,1,1,0,0,1,0,2,0,1,1,3,1,0,0,0,0,0,0,0,1,0,0,2,1, 1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0, 2,3,3,3,3,1,2,2,2,2,0,1,1,0,2,1,1,1,2,1,0,1,1,0,0,1,0,1,0,0,2,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,3,3,3,2,0,0,1,1,2,2,1,0,0,2,0,1,1,3,0,0,1,0,0,0,0,0,1,0,1,2,1, 1,1,2,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0, 1,3,2,3,2,1,0,0,2,2,2,0,1,0,2,0,1,1,1,0,1,0,0,0,3,0,1,1,0,0,2,1, 1,1,1,0,1,1,0,0,0,0,1,1,0,1,0,0,2,1,1,0,1,0,0,0,1,0,1,0,0,1,1,0, 3,1,2,1,1,2,2,2,2,2,2,1,2,2,1,1,0,0,0,2,2,2,0,0,0,1,2,1,0,1,0,1, 2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,1,1,1,0,1,0,1,1,0,1,1,1,0,0,1, 3,0,0,0,0,2,0,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,0,1,0,1, 1,1,0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1, 1,3,3,2,2,0,0,0,2,2,0,0,0,1,2,0,1,1,2,0,0,0,0,0,0,0,0,1,0,0,2,1, 0,1,1,0,0,1,1,0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0, 2,3,2,3,2,0,0,0,0,1,1,0,0,0,2,0,2,0,2,0,0,0,0,0,1,0,0,1,0,0,1,1, 1,1,2,0,1,2,1,0,1,1,2,1,1,1,1,1,2,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0, 1,3,2,2,2,1,0,0,2,2,1,0,1,2,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1, 0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, 
1,0,0,1,0,2,3,1,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,2,1,1,1,0,0,0,0,1, 1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, 2,0,2,0,0,1,0,3,2,1,2,1,2,2,0,1,0,0,0,2,1,0,0,2,1,1,1,1,0,2,0,2, 2,1,1,1,1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,0,0,0,1,1,1,1,0,1,0,0,1, 1,2,2,2,2,1,0,0,1,0,0,0,0,0,2,0,1,1,1,1,0,0,0,0,1,0,1,2,0,0,2,0, 1,0,1,1,1,2,1,0,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,1,0, 2,1,2,2,2,0,3,0,1,1,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0, 1,2,2,3,2,2,0,0,1,1,2,0,1,2,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1, 0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, 2,2,1,1,2,1,2,2,2,2,2,1,2,2,0,1,0,0,0,1,2,2,2,1,2,1,1,1,1,1,2,1, 1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1, 1,2,2,2,2,0,1,0,2,2,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0, 0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, 0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,2,2,2,2,0,0,0,2,2,2,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1, 0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,2,2,2,2,0,0,0,0,1,0,0,1,1,2,0,0,0,0,1,0,1,0,0,1,0,0,2,0,0,0,1, 0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, 1,2,2,2,1,1,2,0,2,1,1,1,1,0,2,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1, 0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, 1,0,2,1,2,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0, 0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, 1,0,0,0,0,2,0,1,2,1,0,1,1,1,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1, 0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1, 2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0, 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0, 
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 1,1,0,1,1,0,1,0,1,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,0,0,0, 0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, ) Koi8rModel = { 'charToOrderMap': KOI8R_CharToOrderMap, 'precedenceMatrix': RussianLangModel, 'mTypicalPositiveRatio': 0.976601, 'keepEnglishLetter': False, 'charsetName': "KOI8-R" } Win1251CyrillicModel = { 'charToOrderMap': win1251_CharToOrderMap, 'precedenceMatrix': RussianLangModel, 'mTypicalPositiveRatio': 0.976601, 'keepEnglishLetter': False, 'charsetName': "windows-1251" } Latin5CyrillicModel = { 'charToOrderMap': latin5_CharToOrderMap, 'precedenceMatrix': RussianLangModel, 'mTypicalPositiveRatio': 0.976601, 'keepEnglishLetter': False, 'charsetName': "ISO-8859-5" } MacCyrillicModel = { 'charToOrderMap': macCyrillic_CharToOrderMap, 'precedenceMatrix': RussianLangModel, 'mTypicalPositiveRatio': 0.976601, 'keepEnglishLetter': False, 'charsetName': "MacCyrillic" }; Ibm866Model = { 'charToOrderMap': IBM866_CharToOrderMap, 'precedenceMatrix': RussianLangModel, 'mTypicalPositiveRatio': 0.976601, 'keepEnglishLetter': False, 'charsetName': "IBM866" } Ibm855Model = { 'charToOrderMap': IBM855_CharToOrderMap, 'precedenceMatrix': RussianLangModel, 'mTypicalPositiveRatio': 0.976601, 'keepEnglishLetter': False, 'charsetName': "IBM855" } # flake8: noqa from settings_local import SUBSCRIPTION_ID, STORAGE_ACCOUNT_NAME, STORAGE_ACCOUNT_KEY, EMAIL_USERNAME, EMAIL_PASSWORD __author__ = 'Natalie Sanders' __author__ = 'Natalie Sanders' from azure.servicemanagement import * from azure.storage import * from subprocess import call from os import chdir import os import socket import zipfile import pickle import smtplib from email.mime.multipart import MIMEMultipart from email.mime.base import MIMEBase from email.mime.text import MIMEText from email.utils import COMMASPACE, formatdate from email import encoders global user_info 
def delete_vm():
    """Remove this VM's role from its Azure hosted service.

    Reads module globals ``sms`` (ServiceManagementService), ``username``
    (hosted-service / deployment name) and ``machine_name`` (role name).
    If this role is the only one left, the whole deployment is deleted,
    because Azure refuses to delete the last role individually.
    """
    hosted_service = sms.get_hosted_service_properties(service_name=username,
                                                       embed_detail=True)
    if hosted_service.deployments:
        deployment = sms.get_deployment_by_name(username, username)
        roles = deployment.role_list
        for instance in roles:
            if machine_name == instance.role_name:
                if len(roles) == 1:
                    # Last role standing: must drop the deployment itself.
                    sms.delete_deployment(service_name=username,
                                          deployment_name=username)
                else:
                    sms.delete_role(service_name=username,
                                    deployment_name=username,
                                    role_name=machine_name)
                break


def send_mail(send_from, send_to, subject, text, files=None,
              server="localhost", port=587, username='', password='',
              isTls=True):
    """Send ``text`` as an e-mail with optional file attachments.

    :param send_from: sender address (also used as the envelope sender)
    :param send_to:   recipient address
    :param subject:   message subject line
    :param text:      plain-text body
    :param files:     iterable of paths to attach (default: none)
    :param server:    SMTP host; ``port``, ``username``, ``password`` as usual
    :param isTls:     when true, upgrade the connection with STARTTLS
    """
    # BUG FIX: the original declared ``files=[]`` -- a mutable default that
    # is shared across calls. ``None`` sentinel restores safe behaviour.
    if files is None:
        files = []

    msg = MIMEMultipart()
    msg['From'] = send_from
    msg['To'] = send_to
    msg['Date'] = formatdate(localtime=True)
    msg['Subject'] = subject
    msg.attach(MIMEText(text))

    for f in files:
        part = MIMEBase('application', "octet-stream")
        # BUG FIX: close each attachment file; the original leaked the
        # handle returned by open(f, "rb").
        fh = open(f, "rb")
        try:
            part.set_payload(fh.read())
        finally:
            fh.close()
        encoders.encode_base64(part)
        part.add_header('Content-Disposition',
                        'attachment; filename="{0}"'.format(os.path.basename(f)))
        msg.attach(part)

    smtp = smtplib.SMTP(server, port)
    try:
        if isTls:
            smtp.starttls()
        smtp.login(username, password)
        smtp.sendmail(send_from, send_to, msg.as_string())
    finally:
        # Always close the SMTP session, even when login/send fails.
        smtp.quit()
    print("emailed\n")


def upload_results():
    """Zip the simulation input plus stdout and upload to blob storage.

    Uses module globals ``user_info``, ``machine_name``, ``container_name``
    and ``blob_service``.
    """
    z = zipfile.ZipFile(user_info["sim"] + '_Results.zip', "w",
                        zipfile.ZIP_DEFLATED)
    # Snapshot the listing first: "Input" is resolved against the current
    # working directory *before* we chdir (matches original semantics).
    input_files = os.listdir("Input")
    # Hoisted out of the loop -- the original re-ran this chdir on every
    # iteration although the target never changes.
    chdir("c:/Users/Public/Sim/Input")
    for f in input_files:
        z.write(f)
    chdir("c:/Users/Public/Sim/Output")
    z.write("stdout.txt")
    z.close()

    result = 'r-' + machine_name
    # NOTE(review): the archive built above is named '<sim>_Results.zip',
    # yet the blob uploaded below comes from 'Output.zip'. These look out
    # of sync -- confirm which file is actually meant to be uploaded.
    blob_service.put_block_blob_from_path(container_name, result,
                                          'c:/Users/Public/Sim/Output.zip')
    print("uploaded\n")


def download_input():
    """Fetch this machine's input blob and unpack it into ./Input.

    Uses module globals ``blob_service``, ``container_name`` and
    ``machine_name``.
    """
    blob_service.get_blob_to_path(container_name, machine_name,
                                  'c:/Users/Public/Sim/Input.zip')
    chdir("C:/Users/Public/Sim")
    z = zipfile.ZipFile('Input.zip', 'r')
    z.extractall('Input')
    z.close()
    print("downloaded\n")

########################################################################################################################
##                                                        MAIN                                                        ##
######################################################################################################################## ##### Service Management Object ##### machine_name = socket.gethostname() split = machine_name.split('-') container_name = '-'.join(split[:-1]).lower() username = '-'.join(split[:-1]) subscription_id = SUBSCRIPTION_ID certificate_path = 'CURRENT_USER\\my\\AzureCertificate' call(['certutil', '-user', '-f', '-p', '1', '-importPFX', 'c:/temp/azure.pfx']) sms = ServiceManagementService(subscription_id, certificate_path) ###### Redirect stdout to File ###### chdir('C:/Users/Public/Sim') output = open("Output/stdout.txt", "w+") ####### Download Input Files ######## blob_service = BlobService( account_name=STORAGE_ACCOUNT_NAME, account_key=STORAGE_ACCOUNT_KEY) try: download_input() f = "C:/Users/Public/Sim/Input/AzureUserInfo.pickle" user_info = pickle.load(file(f)) output.write('Mock model executed correctly.') output.close() print "download input" except: output.write('Could not download input from the cloud.\n') output.close() try: ########### Upload Results ########## upload_results() ########### Email Results ########### send_mail( send_from = 'vecnet.results@gmail.com', send_to = user_info["email"], subject = 'The results for your ' + user_info["sim"] + ' simulation are ready!', text = 'Hi ' + user_info['username'] + ',\n\nYour ' + user_info["sim"] + ' simulation has ' 'finished running. Look for your results below.\n\nThanks for using VecNet Azure ' 'resources!\nThe VecNet Team', files = ['c:/Users/Public/Sim/' + user_info["sim"] + '_Results.zip'], server = "smtp.gmail.com", port = 587, username = EMAIL_USERNAME, password = EMAIL_PASSWORD, isTls = True) print "sent mail" ############# Exit Script ############# finally: delete_vm() # Tests that work for both bytes and buffer objects. # See PEP 3137. import struct import sys class MixinBytesBufferCommonTests(object): """Tests that work for both bytes and buffer objects. See PEP 3137. 
""" def marshal(self, x): """Convert x into the appropriate type for these tests.""" raise RuntimeError('test class must provide a marshal method') def test_islower(self): self.assertFalse(self.marshal(b'').islower()) self.assert_(self.marshal(b'a').islower()) self.assertFalse(self.marshal(b'A').islower()) self.assertFalse(self.marshal(b'\n').islower()) self.assert_(self.marshal(b'abc').islower()) self.assertFalse(self.marshal(b'aBc').islower()) self.assert_(self.marshal(b'abc\n').islower()) self.assertRaises(TypeError, self.marshal(b'abc').islower, 42) def test_isupper(self): self.assertFalse(self.marshal(b'').isupper()) self.assertFalse(self.marshal(b'a').isupper()) self.assert_(self.marshal(b'A').isupper()) self.assertFalse(self.marshal(b'\n').isupper()) self.assert_(self.marshal(b'ABC').isupper()) self.assertFalse(self.marshal(b'AbC').isupper()) self.assert_(self.marshal(b'ABC\n').isupper()) self.assertRaises(TypeError, self.marshal(b'abc').isupper, 42) def test_istitle(self): self.assertFalse(self.marshal(b'').istitle()) self.assertFalse(self.marshal(b'a').istitle()) self.assert_(self.marshal(b'A').istitle()) self.assertFalse(self.marshal(b'\n').istitle()) self.assert_(self.marshal(b'A Titlecased Line').istitle()) self.assert_(self.marshal(b'A\nTitlecased Line').istitle()) self.assert_(self.marshal(b'A Titlecased, Line').istitle()) self.assertFalse(self.marshal(b'Not a capitalized String').istitle()) self.assertFalse(self.marshal(b'Not\ta Titlecase String').istitle()) self.assertFalse(self.marshal(b'Not--a Titlecase String').istitle()) self.assertFalse(self.marshal(b'NOT').istitle()) self.assertRaises(TypeError, self.marshal(b'abc').istitle, 42) def test_isspace(self): self.assertFalse(self.marshal(b'').isspace()) self.assertFalse(self.marshal(b'a').isspace()) self.assert_(self.marshal(b' ').isspace()) self.assert_(self.marshal(b'\t').isspace()) self.assert_(self.marshal(b'\r').isspace()) self.assert_(self.marshal(b'\n').isspace()) self.assert_(self.marshal(b' 
\t\r\n').isspace()) self.assertFalse(self.marshal(b' \t\r\na').isspace()) self.assertRaises(TypeError, self.marshal(b'abc').isspace, 42) def test_isalpha(self): self.assertFalse(self.marshal(b'').isalpha()) self.assert_(self.marshal(b'a').isalpha()) self.assert_(self.marshal(b'A').isalpha()) self.assertFalse(self.marshal(b'\n').isalpha()) self.assert_(self.marshal(b'abc').isalpha()) self.assertFalse(self.marshal(b'aBc123').isalpha()) self.assertFalse(self.marshal(b'abc\n').isalpha()) self.assertRaises(TypeError, self.marshal(b'abc').isalpha, 42) def test_isalnum(self): self.assertFalse(self.marshal(b'').isalnum()) self.assert_(self.marshal(b'a').isalnum()) self.assert_(self.marshal(b'A').isalnum()) self.assertFalse(self.marshal(b'\n').isalnum()) self.assert_(self.marshal(b'123abc456').isalnum()) self.assert_(self.marshal(b'a1b3c').isalnum()) self.assertFalse(self.marshal(b'aBc000 ').isalnum()) self.assertFalse(self.marshal(b'abc\n').isalnum()) self.assertRaises(TypeError, self.marshal(b'abc').isalnum, 42) def test_isdigit(self): self.assertFalse(self.marshal(b'').isdigit()) self.assertFalse(self.marshal(b'a').isdigit()) self.assert_(self.marshal(b'0').isdigit()) self.assert_(self.marshal(b'0123456789').isdigit()) self.assertFalse(self.marshal(b'0123456789a').isdigit()) self.assertRaises(TypeError, self.marshal(b'abc').isdigit, 42) def test_lower(self): self.assertEqual(b'hello', self.marshal(b'HeLLo').lower()) self.assertEqual(b'hello', self.marshal(b'hello').lower()) self.assertRaises(TypeError, self.marshal(b'hello').lower, 42) def test_upper(self): self.assertEqual(b'HELLO', self.marshal(b'HeLLo').upper()) self.assertEqual(b'HELLO', self.marshal(b'HELLO').upper()) self.assertRaises(TypeError, self.marshal(b'hello').upper, 42) def test_capitalize(self): self.assertEqual(b' hello ', self.marshal(b' hello ').capitalize()) self.assertEqual(b'Hello ', self.marshal(b'Hello ').capitalize()) self.assertEqual(b'Hello ', self.marshal(b'hello ').capitalize()) 
self.assertEqual(b'Aaaa', self.marshal(b'aaaa').capitalize()) self.assertEqual(b'Aaaa', self.marshal(b'AaAa').capitalize()) self.assertRaises(TypeError, self.marshal(b'hello').capitalize, 42) def test_ljust(self): self.assertEqual(b'abc ', self.marshal(b'abc').ljust(10)) self.assertEqual(b'abc ', self.marshal(b'abc').ljust(6)) self.assertEqual(b'abc', self.marshal(b'abc').ljust(3)) self.assertEqual(b'abc', self.marshal(b'abc').ljust(2)) self.assertEqual(b'abc*******', self.marshal(b'abc').ljust(10, '*')) self.assertRaises(TypeError, self.marshal(b'abc').ljust) def test_rjust(self): self.assertEqual(b' abc', self.marshal(b'abc').rjust(10)) self.assertEqual(b' abc', self.marshal(b'abc').rjust(6)) self.assertEqual(b'abc', self.marshal(b'abc').rjust(3)) self.assertEqual(b'abc', self.marshal(b'abc').rjust(2)) self.assertEqual(b'*******abc', self.marshal(b'abc').rjust(10, '*')) self.assertRaises(TypeError, self.marshal(b'abc').rjust) def test_center(self): self.assertEqual(b' abc ', self.marshal(b'abc').center(10)) self.assertEqual(b' abc ', self.marshal(b'abc').center(6)) self.assertEqual(b'abc', self.marshal(b'abc').center(3)) self.assertEqual(b'abc', self.marshal(b'abc').center(2)) self.assertEqual(b'***abc****', self.marshal(b'abc').center(10, '*')) self.assertRaises(TypeError, self.marshal(b'abc').center) def test_swapcase(self): self.assertEqual(b'hEllO CoMPuTErS', self.marshal(b'HeLLo cOmpUteRs').swapcase()) self.assertRaises(TypeError, self.marshal(b'hello').swapcase, 42) def test_zfill(self): self.assertEqual(b'123', self.marshal(b'123').zfill(2)) self.assertEqual(b'123', self.marshal(b'123').zfill(3)) self.assertEqual(b'0123', self.marshal(b'123').zfill(4)) self.assertEqual(b'+123', self.marshal(b'+123').zfill(3)) self.assertEqual(b'+123', self.marshal(b'+123').zfill(4)) self.assertEqual(b'+0123', self.marshal(b'+123').zfill(5)) self.assertEqual(b'-123', self.marshal(b'-123').zfill(3)) self.assertEqual(b'-123', self.marshal(b'-123').zfill(4)) 
self.assertEqual(b'-0123', self.marshal(b'-123').zfill(5)) self.assertEqual(b'000', self.marshal(b'').zfill(3)) self.assertEqual(b'34', self.marshal(b'34').zfill(1)) self.assertEqual(b'0034', self.marshal(b'34').zfill(4)) self.assertRaises(TypeError, self.marshal(b'123').zfill) def test_expandtabs(self): self.assertEqual(b'abc\rab def\ng hi', self.marshal(b'abc\rab\tdef\ng\thi').expandtabs()) self.assertEqual(b'abc\rab def\ng hi', self.marshal(b'abc\rab\tdef\ng\thi').expandtabs(8)) self.assertEqual(b'abc\rab def\ng hi', self.marshal(b'abc\rab\tdef\ng\thi').expandtabs(4)) self.assertEqual(b'abc\r\nab def\ng hi', self.marshal(b'abc\r\nab\tdef\ng\thi').expandtabs(4)) self.assertEqual(b'abc\rab def\ng hi', self.marshal(b'abc\rab\tdef\ng\thi').expandtabs()) self.assertEqual(b'abc\rab def\ng hi', self.marshal(b'abc\rab\tdef\ng\thi').expandtabs(8)) self.assertEqual(b'abc\r\nab\r\ndef\ng\r\nhi', self.marshal(b'abc\r\nab\r\ndef\ng\r\nhi').expandtabs(4)) self.assertEqual(b' a\n b', self.marshal(b' \ta\n\tb').expandtabs(1)) self.assertRaises(TypeError, self.marshal(b'hello').expandtabs, 42, 42) # This test is only valid when sizeof(int) == sizeof(void*) == 4. 
if sys.maxint < (1 << 32) and struct.calcsize('P') == 4: self.assertRaises(OverflowError, self.marshal(b'\ta\n\tb').expandtabs, sys.maxint) def test_title(self): self.assertEqual(b' Hello ', self.marshal(b' hello ').title()) self.assertEqual(b'Hello ', self.marshal(b'hello ').title()) self.assertEqual(b'Hello ', self.marshal(b'Hello ').title()) self.assertEqual(b'Format This As Title String', self.marshal(b'fOrMaT thIs aS titLe String').title()) self.assertEqual(b'Format,This-As*Title;String', self.marshal(b'fOrMaT,thIs-aS*titLe;String').title()) self.assertEqual(b'Getint', self.marshal(b'getInt').title()) self.assertRaises(TypeError, self.marshal(b'hello').title, 42) def test_splitlines(self): self.assertEqual([b'abc', b'def', b'', b'ghi'], self.marshal(b'abc\ndef\n\rghi').splitlines()) self.assertEqual([b'abc', b'def', b'', b'ghi'], self.marshal(b'abc\ndef\n\r\nghi').splitlines()) self.assertEqual([b'abc', b'def', b'ghi'], self.marshal(b'abc\ndef\r\nghi').splitlines()) self.assertEqual([b'abc', b'def', b'ghi'], self.marshal(b'abc\ndef\r\nghi\n').splitlines()) self.assertEqual([b'abc', b'def', b'ghi', b''], self.marshal(b'abc\ndef\r\nghi\n\r').splitlines()) self.assertEqual([b'', b'abc', b'def', b'ghi', b''], self.marshal(b'\nabc\ndef\r\nghi\n\r').splitlines()) self.assertEqual([b'\n', b'abc\n', b'def\r\n', b'ghi\n', b'\r'], self.marshal(b'\nabc\ndef\r\nghi\n\r').splitlines(1)) self.assertRaises(TypeError, self.marshal(b'abc').splitlines, 42, 42) """ Installs and configures heat """ import uuid import logging import os from packstack.installer import utils from packstack.installer import validators from packstack.modules.ospluginutils import (getManifestTemplate, manifestfiles, appendManifestFile) controller = None # Plugin name PLUGIN_NAME = "OS-HEAT" PLUGIN_NAME_COLORED = utils.color_text(PLUGIN_NAME, 'blue') logging.debug("plugin %s loaded", __name__) def initConfig(controllerObject): global controller controller = controllerObject logging.debug("Adding 
OpenStack Heat configuration") parameters = [ {"CMD_OPTION" : "heat-host", "USAGE" : ('The IP address of the server on which ' 'to install Heat service'), "PROMPT" : 'Enter the IP address of the Heat service', "OPTION_LIST" : [], "VALIDATORS" : [validators.validate_ssh], "DEFAULT_VALUE" : utils.get_localhost_ip(), "MASK_INPUT" : False, "LOOSE_VALIDATION": True, "CONF_NAME" : "CONFIG_HEAT_HOST", "USE_DEFAULT" : False, "NEED_CONFIRM" : False, "CONDITION" : False }, {"CMD_OPTION" : "heat-mysql-password", "USAGE" : 'The password used by Heat user to authenticate against MySQL', "PROMPT" : "Enter the password for the Heat MySQL user", "OPTION_LIST" : [], "VALIDATORS" : [validators.validate_not_empty], "DEFAULT_VALUE" : uuid.uuid4().hex[:16], "MASK_INPUT" : True, "LOOSE_VALIDATION": False, "CONF_NAME" : "CONFIG_HEAT_DB_PW", "USE_DEFAULT" : True, "NEED_CONFIRM" : True, "CONDITION" : False }, {"CMD_OPTION" : "heat-ks-passwd", "USAGE" : "The password to use for the Heat to authenticate with Keystone", "PROMPT" : "Enter the password for the Heat Keystone access", "OPTION_LIST" : [], "VALIDATORS" : [validators.validate_not_empty], "DEFAULT_VALUE" : uuid.uuid4().hex[:16], "MASK_INPUT" : True, "LOOSE_VALIDATION": False, "CONF_NAME" : "CONFIG_HEAT_KS_PW", "USE_DEFAULT" : True, "NEED_CONFIRM" : True, "CONDITION" : False }, {"CMD_OPTION" : "os-heat-cloudwatch-install", "USAGE" : ("Set to 'y' if you would like Packstack to " "install Heat CloudWatch API"), "PROMPT" : "Should Packstack install Heat CloudWatch API", "OPTION_LIST" : ["y", "n"], "VALIDATORS" : [validators.validate_options], "DEFAULT_VALUE" : "n", "MASK_INPUT" : False, "LOOSE_VALIDATION": False, "CONF_NAME" : "CONFIG_HEAT_CLOUDWATCH_INSTALL", "USE_DEFAULT" : False, "NEED_CONFIRM" : False, "CONDITION" : False }, {"CMD_OPTION" : "os-heat-cfn-install", "USAGE" : ("Set to 'y' if you would like Packstack to " "install Heat CloudFormation API"), "PROMPT" : "Should Packstack install Heat CloudFormation API", "OPTION_LIST" : 
["y", "n"], "VALIDATORS" : [validators.validate_options], "DEFAULT_VALUE" : "n", "MASK_INPUT" : False, "LOOSE_VALIDATION": False, "CONF_NAME" : "CONFIG_HEAT_CFN_INSTALL", "USE_DEFAULT" : False, "NEED_CONFIRM" : False, "CONDITION" : False }, ] group = {"GROUP_NAME" : "Heat", "DESCRIPTION" : "Heat Config parameters", "PRE_CONDITION" : "CONFIG_HEAT_INSTALL", "PRE_CONDITION_MATCH" : "y", "POST_CONDITION" : False, "POST_CONDITION_MATCH": True} controller.addGroup(group, parameters) parameters = [ {"CMD_OPTION" : "heat-api-cloudwatch-host", "USAGE" : ('The IP address of the server on which ' 'to install Heat CloudWatch API service'), "PROMPT" : ('Enter the IP address of the Heat CloudWatch API ' 'server'), "OPTION_LIST" : [], "VALIDATORS" : [validators.validate_ssh], "DEFAULT_VALUE" : utils.get_localhost_ip(), "MASK_INPUT" : False, "LOOSE_VALIDATION": True, "CONF_NAME" : "CONFIG_HEAT_CLOUDWATCH_HOST", "USE_DEFAULT" : False, "NEED_CONFIRM" : False, "CONDITION" : False }, ] group = {"GROUP_NAME" : "Heat CloudWatch API", "DESCRIPTION" : "Heat CloudWatch API config parameters", "PRE_CONDITION" : "CONFIG_HEAT_CLOUDWATCH_INSTALL", "PRE_CONDITION_MATCH" : "y", "POST_CONDITION" : False, "POST_CONDITION_MATCH": True} controller.addGroup(group, parameters) parameters = [ {"CMD_OPTION" : "heat-api-cfn-host", "USAGE" : ('The IP address of the server on which ' 'to install Heat CloudFormation API service'), "PROMPT" : ('Enter the IP address of the Heat CloudFormation ' 'API server'), "OPTION_LIST" : [], "VALIDATORS" : [validators.validate_ssh], "DEFAULT_VALUE" : utils.get_localhost_ip(), "MASK_INPUT" : False, "LOOSE_VALIDATION": True, "CONF_NAME" : "CONFIG_HEAT_CFN_HOST", "USE_DEFAULT" : False, "NEED_CONFIRM" : False, "CONDITION" : False }, ] group = {"GROUP_NAME" : "Heat CloudFormation API", "DESCRIPTION" : "Heat CloudFormation API config parameters", "PRE_CONDITION" : "CONFIG_HEAT_CFN_INSTALL", "PRE_CONDITION_MATCH" : "y", "POST_CONDITION" : False, "POST_CONDITION_MATCH": True} 
controller.addGroup(group, parameters) def initSequences(controller): if controller.CONF['CONFIG_HEAT_INSTALL'] != 'y': return steps = [{'title': 'Adding Heat manifest entries', 'functions': [create_manifest]}, {'title': 'Adding Heat Keystone manifest entries', 'functions':[create_keystone_manifest]}] if controller.CONF.get('CONFIG_HEAT_CLOUDWATCH_INSTALL', 'n') == 'y': steps.append({'title': 'Adding Heat CloudWatch API manifest entries', 'functions': [create_cloudwatch_manifest]}) if controller.CONF.get('CONFIG_HEAT_CFN_INSTALL', 'n') == 'y': steps.append({'title': 'Adding Heat CloudFormation API manifest entries', 'functions': [create_cfn_manifest]}) controller.addSequence("Installing Heat", [], [], steps) def create_manifest(config): if config['CONFIG_HEAT_CLOUDWATCH_INSTALL'] == 'y': config['CONFIG_HEAT_WATCH_HOST'] = config['CONFIG_HEAT_CLOUDWATCH_HOST'] else: config['CONFIG_HEAT_WATCH_HOST'] = config['CONFIG_HEAT_HOST'] if config['CONFIG_HEAT_CFN_INSTALL'] == 'y': config['CONFIG_HEAT_METADATA_HOST'] = config['CONFIG_HEAT_CFN_HOST'] else: config['CONFIG_HEAT_METADATA_HOST'] = config['CONFIG_HEAT_HOST'] manifestfile = "%s_heat.pp" % controller.CONF['CONFIG_HEAT_HOST'] manifestdata = getManifestTemplate("heat.pp") appendManifestFile(manifestfile, manifestdata) def create_keystone_manifest(config): manifestfile = "%s_keystone.pp" % controller.CONF['CONFIG_KEYSTONE_HOST'] manifestdata = getManifestTemplate("keystone_heat.pp") appendManifestFile(manifestfile, manifestdata) def create_cloudwatch_manifest(config): manifestfile = "%s_heatcw.pp" % controller.CONF['CONFIG_HEAT_CLOUDWATCH_HOST'] manifestdata = getManifestTemplate("heat_cloudwatch.pp") appendManifestFile(manifestfile, manifestdata, marker='heat') def create_cfn_manifest(config): manifestfile = "%s_heatcnf.pp" % controller.CONF['CONFIG_HEAT_CFN_HOST'] manifestdata = getManifestTemplate("heat_cfn.pp") appendManifestFile(manifestfile, manifestdata, marker='heat') import sys import getopt import traceback 
import urllib2 from urlparse import urljoin, urlparse, ParseResult from BeautifulSoup import BeautifulSoup def connect(conn, url): assert conn is not None, 'Input connection must be valid' assert url, 'Input old URL cannot be empty' response = None try: response = conn.open(url) except urllib2.HTTPError as e: error_msg = 'Error {} connecting to {}'.format(e.code, url) sys.stderr.write(repr(error_msg) + '\n') except urllib2.URLError as e: error_msg = 'Error {} connecting to {}'.format(e.reason, url) sys.stderr.write(repr(error_msg) + '\n') except: error_msg = 'Error connecting to {}'.format(url) sys.stderr.write(repr(error_msg) + '\n') return response def crawl_page (conn, url, domain, visited_links=[]): assert conn is not None, 'Input connection must be valid' assert url, 'Input old URL cannot be empty' assert domain, 'Input old domain cannot be empty' assert isinstance(visited_links, list) visited_links.append(url) remaining_links = [] title = '' meta_desc = '' response = connect(conn, url) if not response is None: body = response.read() try: soup = BeautifulSoup(body) except: error_msg = 'Error parsing {}'.format(url) sys.stderr.write(error_msg + "\n") soup = None if not soup is None: if soup.html: if soup.html.head: title = soup.html.head.title.string or '' else: title ='' else: title = '' meta_desc = soup.findAll(attrs={"name":"description"}) if len (meta_desc) > 0: meta_desc = meta_desc[0]['content'] else: meta_desc = "" if visited_links: anchors = soup.findAll("a") for anchor in anchors: if anchor is None or not anchor.has_key('href'): continue try: href = anchor['href'] if domain in href or (not 'www' in href and not 'http' in href): link = urljoin('http://' + domain, href).split ("#")[0].lower() if not link in visited_links and link != '/' and not 'mailto' in link: if not link in visited_links: if not '.pdf' in link.lower() \ and not '.png' in link.lower() \ and not '.jpg' in link.lower(): remaining_links.append(link) except: print traceback.format_exc() 
print '{};{};{}'.format(url.encode('utf-8'), title.encode('utf-8').strip(' \n\t\r'), meta_desc.encode ('utf-8').strip(' \n\t\r')) assert visited_links, 'Output visited_links cannot be empty' return remaining_links, visited_links def clean_scheme(url): assert url, 'Input URL cannot be empty' scheme = 'http://' sections = url.split(scheme) if len(sections) == 1: url = scheme + url assert url, 'Output URL cannot be empty' assert scheme in url, 'Output URL must have a scheme' return url def replace_domain(source_url, new_domain): o = urlparse(source_url) return ParseResult(o.scheme, new_domain, o.path, o.params, o.query, o.fragment).geturl() def find_differences(old_domain, new_domain, verbose=False): old_domain = unicode(old_domain) new_domain = unicode(new_domain) old_url = clean_scheme(old_domain) conn = urllib2.build_opener() visited_links = [] remaining_links, visited_links = crawl_page(conn, old_url, old_domain, visited_links) new_url = replace_domain(old_url, new_domain) crawl_page(conn, new_url, new_domain) while True: if remaining_links: ln = remaining_links.pop() more_links, visited_links = crawl_page(conn, ln, old_domain, visited_links) new_ln = replace_domain(ln, new_domain) crawl_page(conn, new_ln, new_domain) remaining_links.extend(more_links) else: break def main(): old_domain = '' new_domain = '' version = '1.0' verbose = False help = False try: options, remainder = getopt.getopt(sys.argv[1:], 'o:n:', ['old_domain=', 'new_domain=' 'verbose', 'help' ]) except getopt.GetoptError: sys.exit(2) option_not_found = False for opt, arg in options: if opt in ('-o', '--old'): old_domain = arg elif opt in ('-n', '--new'): new_domain = arg elif opt in ('-v', '--verbose'): verbose = True elif opt in ('-h', '--help'): help = True else: option_not_found = True if not options or option_not_found or help: print 'Usage: {} -o -n '.format(sys.argv[0]) if help: sys.exit(0) else: sys.exit(1) find_differences(old_domain, new_domain, verbose) if __name__ == "__main__": main() 
""" Single figure and axes with two items ======================================= Only the pressure q[0] is plotted. In this example the line and points are plotted in different colors by specifying a second item on the same axes. """ #-------------------------- def setplot(plotdata): #-------------------------- """ Specify what is to be plotted at each frame. Input: plotdata, an instance of pyclaw.plotters.data.ClawPlotData. Output: a modified version of plotdata. """ plotdata.clearfigures() # clear any old figures,axes,items data # Figure for q[0] plotfigure = plotdata.new_plotfigure(name='Pressure', figno=1) # Set up for axes in this figure: plotaxes = plotfigure.new_plotaxes() plotaxes.xlimits = 'auto' plotaxes.ylimits = [-.5,1.1] plotaxes.title = 'Pressure' # Set up for item on these axes: plotitem = plotaxes.new_plotitem(name='line', plot_type='1d') plotitem.plot_var = 0 plotitem.plotstyle = '-' plotitem.color = 'b' # Set up for item on these axes: plotitem = plotaxes.new_plotitem(name='points', plot_type='1d') plotitem.plot_var = 0 plotitem.plotstyle = 'o' plotitem.color = '#ff00ff' # any color supported by matplotlib # Parameters used only when creating html and/or latex hardcopy # e.g., via pyclaw.plotters.frametools.printframes: plotdata.printfigs = True # print figures plotdata.print_format = 'png' # file format plotdata.print_framenos = 'all' # list of frames to print plotdata.print_fignos = 'all' # list of figures to print plotdata.html = True # create html files of plots? plotdata.html_homelink = '../README.html'# pointer for index page plotdata.latex = True # create latex file of plots? plotdata.latex_figsperline = 1 # layout of plots plotdata.latex_framesperline = 2 # layout of plots plotdata.latex_makepdf = True # also run pdflatex? 
return plotdata # -*- coding: utf-8 -*- import unittest from os import path import os import sys import shutil import json from subprocess import Popen, PIPE, call import pprint import codecs import_path = path.abspath(__file__) while path.split(import_path)[1] != 'fiware_api_blueprint_renderer': import_path = path.dirname(import_path) sys.path.append(import_path) from tests.test_utils import * special_section_test = None data_test_path=os.path.dirname(path.abspath(__file__))+"/special_sections.json" with open(data_test_path, 'r') as f: special_section_test = json.load(f) class TestSpecialSectionsInJSON(unittest.TestCase): __metaclass__ = TestCaseWithExamplesMetaclass @classmethod def setUpClass(cls): pathname_ = path.dirname(path.abspath(__file__)) cls.apib_file = pathname_+"/api_test.apib" cls.tmp_result_files = "/var/tmp/test-links-in-reference-160faf1aae1dd41c8f16746ea744f138" if os.path.exists(cls.tmp_result_files): shutil.rmtree(cls.tmp_result_files) os.makedirs(cls.tmp_result_files) Popen(["fabre", "-i", cls.apib_file, "-o", cls.tmp_result_files, "--no-clear-temp-dir"], stdout=PIPE, stderr=PIPE).communicate() with codecs.open('/var/tmp/fiware_api_blueprint_renderer_tmp/api_test.json', 'r', encoding='UTF-8') as f: doc = f.read() doc.decode(encoding="UTF-8") cls.out_json = json.loads(doc, encoding='UTF-8') @classmethod def tearDownClass(cls): if os.path.exists(cls.tmp_result_files): shutil.rmtree(cls.tmp_result_files) to_delete = ['/var/tmp/fiware_api_blueprint_renderer_tmp/api_test.apib', '/var/tmp/fiware_api_blueprint_renderer_tmp/api_test.extras', '/var/tmp/fiware_api_blueprint_renderer_tmp/api_test.json'] for filename in to_delete: if os.path.exists(filename): os.remove(filename) def test_special_sections_in_json(self): _json_special_section = self.out_json["api_metadata"]["subsections"][0]["subsections"] _special_section = json.dumps(special_section_test) _special_section = json.loads(_special_section) self.assertEqual(len(_json_special_section), 
len(_special_section)) for section in _json_special_section: expected_value = _special_section[section["id"]].encode('latin-1') obtained_value =section["body"].encode('latin-1') self.assertEqual(expected_value, obtained_value) suite = unittest.TestLoader().loadTestsFromTestCase(TestSpecialSectionsInJSON) unittest.TextTestRunner(verbosity=2).run(suite) # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
# """Unit tests for the windowing classes.""" import unittest from apache_beam import coders from apache_beam.runners import pipeline_context class PipelineContextTest(unittest.TestCase): def test_deduplication(self): context = pipeline_context.PipelineContext() bytes_coder_ref = context.coders.get_id(coders.BytesCoder()) bytes_coder_ref2 = context.coders.get_id(coders.BytesCoder()) self.assertEqual(bytes_coder_ref, bytes_coder_ref2) def test_serialization(self): context = pipeline_context.PipelineContext() float_coder_ref = context.coders.get_id(coders.FloatCoder()) bytes_coder_ref = context.coders.get_id(coders.BytesCoder()) proto = context.to_runner_api() context2 = pipeline_context.PipelineContext.from_runner_api(proto) self.assertEqual( coders.FloatCoder(), context2.coders.get_by_id(float_coder_ref)) self.assertEqual( coders.BytesCoder(), context2.coders.get_by_id(bytes_coder_ref)) if __name__ == '__main__': unittest.main() # -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . 
# ############################################################################## from openerp.osv import fields, osv from openerp.tools.translate import _ import openerp.addons.decimal_precision as dp class change_standard_price(osv.osv_memory): _name = "stock.change.standard.price" _description = "Change Standard Price" _columns = { 'new_price': fields.float('Price', required=True, digits_compute=dp.get_precision('Product Price'), help="If cost price is increased, stock variation account will be debited " "and stock output account will be credited with the value = (difference of amount * quantity available).\n" "If cost price is decreased, stock variation account will be creadited and stock input account will be debited."), } def default_get(self, cr, uid, fields, context=None): """ To get default values for the object. @param self: The object pointer. @param cr: A database cursor @param uid: ID of the user currently logged in @param fields: List of fields for which we want default values @param context: A standard dictionary @return: A dictionary which of fields with values. """ if context is None: context = {} product_pool = self.pool.get('product.product') product_obj = product_pool.browse(cr, uid, context.get('active_id', False)) res = super(change_standard_price, self).default_get(cr, uid, fields, context=context) price = product_obj.standard_price if 'new_price' in fields: res.update({'new_price': price}) return res def change_price(self, cr, uid, ids, context=None): """ Changes the Standard Price of Product. And creates an account move accordingly. @param self: The object pointer. 
@param cr: A database cursor @param uid: ID of the user currently logged in @param ids: List of IDs selected @param context: A standard dictionary @return: """ if context is None: context = {} rec_id = context and context.get('active_id', False) assert rec_id, _('Active ID is not set in Context.') prod_obj = self.pool.get('product.product') res = self.browse(cr, uid, ids, context=context) prod_obj.do_change_standard_price(cr, uid, [rec_id], res[0].new_price, context) return {'type': 'ir.actions.act_window_close'} # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: # coding=utf-8 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) from pants_test.util.xml_test_base import XmlTestBase from pants.contrib.android.android_manifest_parser import AndroidManifest, AndroidManifestParser class TestAndroidManifestParser(XmlTestBase): """Test the AndroidManifestParser and AndroidManifest classes.""" # Test AndroidManifestParser.parse_manifest(). def test_parse_manifest(self): with self.xml_file() as xml: manifest = AndroidManifestParser.parse_manifest(xml) self.assertEqual(manifest.path, xml) def test_bad_parse_manifest(self): xml = '/no/file/here' with self.assertRaises(AndroidManifestParser.BadManifestError): AndroidManifestParser.parse_manifest(xml) # Test AndroidManifest.package_name. 
def test_package_name(self): with self.xml_file() as xml: manifest = AndroidManifestParser.parse_manifest(xml) self.assertEqual(manifest.package_name, 'org.pantsbuild.example.hello') def test_missing_manifest_element(self): with self.xml_file(manifest_element='some_other_element') as xml: with self.assertRaises(AndroidManifestParser.BadManifestError): AndroidManifestParser.parse_manifest(xml) def test_missing_package_attribute(self): with self.xml_file(package_attribute='bad_value') as xml: with self.assertRaises(AndroidManifestParser.BadManifestError): AndroidManifestParser.parse_manifest(xml) def test_weird_package_name(self): # Should accept unexpected package names, the info gets verified in classes that consume it. with self.xml_file(package_value='cola') as xml: manifest = AndroidManifestParser.parse_manifest(xml) self.assertEqual(manifest.package_name, 'cola') # Test AndroidManifest.target_sdk. def test_target_sdk(self): with self.xml_file() as xml: manifest = AndroidManifestParser.parse_manifest(xml) self.assertEqual(manifest.target_sdk, '19') # These next tests show AndroidManifest.target_sdk fails silently and returns None. def test_no_uses_sdk_element(self): with self.xml_file(uses_sdk_element='something-random') as xml: manifest = AndroidManifestParser.parse_manifest(xml) self.assertIsNone(manifest.target_sdk) def test_no_target_sdk_value(self): with self.xml_file(android_attribute='android:bad_value') as xml: parsed = AndroidManifestParser.parse_manifest(xml) self.assertIsNone(parsed.target_sdk) def test_no_android_part(self): with self.xml_file(android_attribute='unrelated:targetSdkVersion') as xml: manifest = AndroidManifestParser.parse_manifest(xml) self.assertEqual(manifest.package_name, 'org.pantsbuild.example.hello') def test_missing_whole_targetsdk(self): with self.xml_file(android_attribute='unrelated:cola') as xml: manifest = AndroidManifestParser.parse_manifest(xml) self.assertIsNone(manifest.target_sdk) # Test AndroidManifest(). 
def test_android_manifest(self): with self.xml_file() as xml: test = AndroidManifest(xml, '19', 'com.foo.bar') self.assertEqual(test.path, xml) #!/usr/bin/env python # - * - coding: UTF-8 - * - """ This script generates tests text-emphasis-style-property-011 ~ 020 which cover all possible values of text-emphasis-style property, except none and , with horizontal writing mode. It outputs a list of all tests it generated in the format of Mozilla reftest.list to the stdout. """ from __future__ import unicode_literals TEST_FILE = 'text-emphasis-style-property-{:03}{}.html' TEST_TEMPLATE = ''' CSS Test: text-emphasis-style: {title}

    Pass if there is a '{char}' above every character below:

    試験テスト
    ''' REF_FILE = 'text-emphasis-style-property-{:03}-ref.html' REF_TEMPLATE = ''' CSS Reference: text-emphasis-style: {0}

    Pass if there is a '{1}' above every character below:

    {1}{1}{1}{1}{1}
    ''' DATA_SET = [ ('dot', 0x2022, 0x25e6), ('circle', 0x25cf, 0x25cb), ('double-circle', 0x25c9, 0x25ce), ('triangle', 0x25b2, 0x25b3), ('sesame', 0xfe45, 0xfe46), ] SUFFIXES = ['', 'a', 'b', 'c', 'd', 'e'] def get_html_entity(code): return '&#x{:04X};'.format(code) def write_file(filename, content): with open(filename, 'wb') as f: f.write(content.encode('UTF-8')) def write_test_file(idx, suffix, style, code, name=None): if not name: name = style filename = TEST_FILE.format(idx, suffix) write_file(filename, TEST_TEMPLATE.format(index=idx, value=style, char=get_html_entity(code), code='U+{:04X}'.format(code), title=name)) print("== {} {}".format(filename, REF_FILE.format(idx))) idx = 10 def write_files(style, code): global idx idx += 1 fill, shape = style basic_style = "{} {}".format(fill, shape) write_file(REF_FILE.format(idx), REF_TEMPLATE.format(basic_style, get_html_entity(code))) suffix = iter(SUFFIXES) write_test_file(idx, next(suffix), basic_style, code) write_test_file(idx, next(suffix), "{} {}".format(shape, fill), code) if fill == 'filled': write_test_file(idx, next(suffix), shape, code) if shape == 'circle': write_test_file(idx, next(suffix), fill, code, fill + ', horizontal') print("# START tests from {}".format(__file__)) for name, code, _ in DATA_SET: write_files(('filled', name), code) for name, _, code in DATA_SET: write_files(('open', name), code) print("# END tests from {}".format(__file__)) #!/usr/bin/env python # -*- coding: utf-8 -*- import argparse import glob import os import sys from . 
import Command from .server import main from openerp.modules.module import get_module_root, MANIFEST from openerp.service.db import _create_empty_database, DatabaseExists class Start(Command): """Quick start the Odoo server for your project""" def get_module_list(self, path): mods = glob.glob(os.path.join(path, '*/%s' % MANIFEST)) return [mod.split(os.path.sep)[-2] for mod in mods] def run(self, cmdargs): parser = argparse.ArgumentParser( prog="%s start" % sys.argv[0].split(os.path.sep)[-1], description=self.__doc__ ) parser.add_argument('--path', default=".", help="Directory where your project's modules are stored (will autodetect from current dir)") parser.add_argument("-d", "--database", dest="db_name", default=None, help="Specify the database name (default to project's directory name") args, unknown = parser.parse_known_args(args=cmdargs) project_path = os.path.abspath(os.path.expanduser(os.path.expandvars(args.path))) module_root = get_module_root(project_path) db_name = None if module_root: # started in a module so we choose this module name for database db_name = project_path.split(os.path.sep)[-1] # go to the parent's directory of the module root project_path = os.path.abspath(os.path.join(project_path, os.pardir)) # check if one of the subfolders has at least one module mods = self.get_module_list(project_path) if mods and '--addons-path' not in cmdargs: cmdargs.append('--addons-path=%s' % project_path) if not args.db_name: args.db_name = db_name or project_path.split(os.path.sep)[-1] cmdargs.extend(('-d', args.db_name)) # TODO: forbid some database names ? eg template1, ... try: _create_empty_database(args.db_name) except DatabaseExists, e: pass except Exception, e: die("Could not create database `%s`. 
(%s)" % (args.db_name, e)) if '--db-filter' not in cmdargs: cmdargs.append('--db-filter=^%s$' % args.db_name) # Remove --path /-p options from the command arguments def to_remove(i, l): return l[i] == '-p' or l[i].startswith('--path') or \ (i > 0 and l[i-1] in ['-p', '--path']) cmdargs = [v for i, v in enumerate(cmdargs) if not to_remove(i, cmdargs)] main(cmdargs) def die(message, code=1): print >>sys.stderr, message sys.exit(code) #!/usr/bin/python # -*- coding: utf-8 -*- # # Copyright (C) 2013, Peter Sprygada # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
#

ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['preview'],
                    'supported_by': 'community'}

DOCUMENTATION = '''
---
module: ejabberd_user
version_added: "1.5"
author: "Peter Sprygada (@privateip)"
short_description: Manages users for ejabberd servers
requirements:
    - ejabberd with mod_admin_extra
description:
    - This module provides user management for ejabberd servers
options:
    username:
        description:
            - the name of the user to manage
        required: true
    host:
        description:
            - the ejabberd host associated with this username
        required: true
    password:
        description:
            - the password to assign to the username
        required: false
    logging:
        description:
            - enables or disables the local syslog facility for this module
        required: false
        default: false
        choices: [ 'true', 'false', 'yes', 'no' ]
    state:
        description:
            - describe the desired state of the user to be managed
        required: false
        default: 'present'
        choices: [ 'present', 'absent' ]
notes:
    - Password parameter is required for state == present only
    - Passwords must be stored in clear text for this release
    - The ejabberd configuration file must include mod_admin_extra as a module.
'''

EXAMPLES = '''
# Example playbook entries using the ejabberd_user module to manage users state.

- name: create a user if it does not exists
  ejabberd_user:
    username: test
    host: server
    password: password

- name: delete a user if it exists
  ejabberd_user:
    username: test
    host: server
    state: absent
'''

import syslog

from ansible.module_utils.pycompat24 import get_exception
from ansible.module_utils.basic import AnsibleModule


class EjabberdUserException(Exception):
    """Base exception for EjabberdUser class object."""
    pass


class EjabberdUser(object):
    """Represents one user resource on an ejabberd server.

    User creation and deletion are delegated to ejabberdctl; the commands
    used are register, unregister, change_password, check_password and
    check_account.
    """

    def __init__(self, module):
        self.module = module
        self.logging = module.params.get('logging')
        self.state = module.params.get('state')
        self.host = module.params.get('host')
        self.user = module.params.get('username')
        self.pwd = module.params.get('password')

    def _attempt(self, action, options):
        """Run *action* via run_command, mapping a missing required
        attribute to the conventional (1, None, message) triple."""
        try:
            return self.run_command(action, options)
        except EjabberdUserException:
            get_exception()
            return (1, None, "required attribute(s) missing")

    @property
    def changed(self):
        """Check whether the stored password differs from the supplied one.

        NOTE(review): despite the docs' True/False wording this returns the
        raw `check_password` return code (0 when the password matches);
        callers rely only on its truthiness, which is preserved here.
        """
        (rc, out, err) = self._attempt('check_password',
                                       [self.user, self.host, self.pwd])
        return rc

    @property
    def exists(self):
        """True when `check_account` reports the user exists on the host
        (return code 0), otherwise False."""
        (rc, out, err) = self._attempt('check_account', [self.user, self.host])
        return not bool(int(rc))

    def log(self, entry):
        """Write *entry* to the local syslog facility when logging is on."""
        if self.logging:
            syslog.openlog('ansible-%s' % self.module._name)
            syslog.syslog(syslog.LOG_NOTICE, entry)

    def run_command(self, cmd, options):
        """Run `ejabberdctl <cmd> <options...>` through the Ansible module
        helper and return its (rc, out, err) triple.

        Raises EjabberdUserException when any option is missing/falsy.
        """
        if not all(options):
            raise EjabberdUserException
        full_cmd = 'ejabberdctl %s ' % cmd
        full_cmd += " ".join(options)
        self.log('command: %s' % full_cmd)
        return self.module.run_command(full_cmd.split())

    def update(self):
        """Update the credentials for the configured user."""
        return self._attempt('change_password', [self.user, self.host, self.pwd])

    def create(self):
        """Register the user on the host with the supplied password."""
        return self._attempt('register', [self.user, self.host, self.pwd])

    def delete(self):
        """Unregister the user from the host."""
        return self._attempt('unregister', [self.user, self.host])


def main():
    """Ansible entry point: reconcile the user's desired state."""
    module = AnsibleModule(
        argument_spec=dict(
            host=dict(default=None, type='str'),
            username=dict(default=None, type='str'),
            password=dict(default=None, type='str', no_log=True),
            state=dict(default='present', choices=['present', 'absent']),
            logging=dict(default=False, type='bool')
        ),
        supports_check_mode=True
    )

    obj = EjabberdUser(module)

    rc = None

    if obj.state == 'absent':
        if obj.exists:
            if module.check_mode:
                module.exit_json(changed=True)
            (rc, out, err) = obj.delete()
            if rc != 0:
                module.fail_json(msg=err, rc=rc)
    elif obj.state == 'present':
        if not obj.exists:
            if module.check_mode:
                module.exit_json(changed=True)
            (rc, out, err) = obj.create()
        elif obj.changed:
            if module.check_mode:
                module.exit_json(changed=True)
            (rc, out, err) = obj.update()
        if rc is not None and rc != 0:
            module.fail_json(msg=err, rc=rc)

    # rc stays None when no command ran, i.e. nothing changed.
    module.exit_json(changed=rc is not None)


if __name__ == '__main__':
    main()
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) import os from pants.base.build_environment import get_buildroot, pants_version from pants.build_graph.aliased_target import AliasTargetFactory from pants.build_graph.build_file_aliases import BuildFileAliases from pants.build_graph.intransitive_dependency import (IntransitiveDependencyFactory, ProvidedDependencyFactory) from pants.build_graph.prep_command import PrepCommand from pants.build_graph.remote_sources import RemoteSources from pants.build_graph.resources import Resources from pants.build_graph.target import Target from pants.build_graph.target_scopes import ScopedDependencyFactory from pants.source.wrapped_globs import Globs, RGlobs, ZGlobs from pants.util.netrc import Netrc """Register the elementary BUILD file constructs.""" class BuildFilePath(object): def __init__(self, parse_context): self._parse_context = parse_context def __call__(self): """ :returns: The absolute path of this BUILD file. 
""" return os.path.join(get_buildroot(), self._parse_context.rel_path) def build_file_aliases(): return BuildFileAliases( targets={ 'alias': AliasTargetFactory(), 'prep_command': PrepCommand, 'resources': Resources, 'remote_sources': RemoteSources, 'target': Target, }, objects={ 'get_buildroot': get_buildroot, 'netrc': Netrc, 'pants_version': pants_version, }, context_aware_object_factories={ 'buildfile_path': BuildFilePath, 'globs': Globs, 'intransitive': IntransitiveDependencyFactory, 'provided': ProvidedDependencyFactory, 'rglobs': RGlobs, 'scoped': ScopedDependencyFactory, 'zglobs': ZGlobs, } ) # GNU Solfege - free ear training software # Copyright (C) 2000, 2001, 2002, 2003, 2004, 2007, 2008, 2011 Tom Cato Amundsen # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
from __future__ import absolute_import import sys from gi.repository import Gtk from solfege import gu from solfege import reportbug class TracebackWindow(Gtk.Dialog): def __init__(self, show_gtk_warnings): Gtk.Dialog.__init__(self) self.m_show_gtk_warnings = show_gtk_warnings self.set_default_size(630, 400) self.vbox.set_border_width(8) label = Gtk.Label(label=_("GNU Solfege message window")) label.set_name('Heading2') self.vbox.pack_start(label, False, False, 0) label = Gtk.Label(label=_("Please report this to the bug database or send an email to bug-solfege@gnu.org if the content of the message make you believe you have found a bug.")) label.set_line_wrap(True) self.vbox.pack_start(label, False, False, 0) scrollwin = Gtk.ScrolledWindow() scrollwin.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC) self.vbox.pack_start(scrollwin, True, True, 0) self.g_text = Gtk.TextView() scrollwin.add(self.g_text) self.g_report = Gtk.Button() self.g_report.connect('clicked', self.do_report) box = Gtk.HBox() self.g_report.add(box) im = Gtk.Image.new_from_stock('gtk-execute', Gtk.IconSize.BUTTON) box.pack_start(im, True, True, 0) label = Gtk.Label() label.set_text_with_mnemonic(gu.escape(_('_Make automatic bug report'))) label.set_use_markup(True) box.pack_start(label, True, True, 0) self.action_area.pack_start(self.g_report, True, True, 0) self.g_close = Gtk.Button(stock='gtk-close') self.action_area.pack_start(self.g_close, True, True, 0) self.g_close.connect('clicked', lambda w: self.hide()) def do_report(self, *v): yesno = gu.dialog_yesno(_( "Automatic bug reports are often mostly useless because " "people omit their email address and add very little info " "about what happened. 
Fixing bugs is difficult if we " "cannot contact you and ask for more information.\n\n" "I would prefer if you open a web browser and report your " "bug to the bug tracker at http://bugs.solfege.org.\n\n" "This will give your bug report higher priority and it " "will be fixed faster.\n\nAre you willing to do that?")) if yesno: return self.m_send_exception = 'Nothing' b = self.g_text.get_buffer() d = reportbug.ReportBugWindow( self, b.get_text(b.get_start_iter(), b.get_end_iter(), False)) while 1: ret = d.run() if ret in (Gtk.ResponseType.REJECT, Gtk.ResponseType.DELETE_EVENT): break elif ret == reportbug.RESPONSE_SEND: self.m_send_exception = d.send_bugreport() break if self.m_send_exception != 'Nothing': if self.m_send_exception: m = Gtk.MessageDialog(self, Gtk.DialogFlags.MODAL, Gtk.MessageType.ERROR, Gtk.ButtonsType.CLOSE, "Sending bugreport failed:\n%s" % self.m_send_exception) else: m = Gtk.MessageDialog(self, Gtk.DialogFlags.MODAL, Gtk.MessageType.INFO, Gtk.ButtonsType.CLOSE, 'Report sent to http://www.solfege.org') m.run() m.destroy() d.destroy() def write(self, txt): if ("DeprecationWarning:" in txt) or \ (not self.m_show_gtk_warnings and ( "GtkWarning" in txt or "PangoWarning" in txt or ("Python C API version mismatch" in txt and ("solfege_c_midi" in txt or "swig" in txt)) )): return sys.stdout.write(txt) if txt.strip(): self.show_all() buffer = self.g_text.get_buffer() buffer.insert(buffer.get_end_iter(), txt) self.set_focus(self.g_close) def flush(self, *v): pass def close(self, *v): pass # # Secret Labs' Regular Expression Engine # # re-compatible interface for the sre matching engine # # Copyright (c) 1998-2001 by Secret Labs AB. All rights reserved. # # This version of the SRE library can be redistributed under CNRI's # Python 1.6 license. For any other use, please contact Secret Labs # AB (info@pythonware.com). # # Portions of this engine have been developed in cooperation with # CNRI. 
Hewlett-Packard provided funding for 1.6 integration and # other compatibility work. # r"""Support for regular expressions (RE). This module provides regular expression matching operations similar to those found in Perl. It supports both 8-bit and Unicode strings; both the pattern and the strings being processed can contain null bytes and characters outside the US ASCII range. Regular expressions can contain both special and ordinary characters. Most ordinary characters, like "A", "a", or "0", are the simplest regular expressions; they simply match themselves. You can concatenate ordinary characters, so last matches the string 'last'. The special characters are: "." Matches any character except a newline. "^" Matches the start of the string. "$" Matches the end of the string or just before the newline at the end of the string. "*" Matches 0 or more (greedy) repetitions of the preceding RE. Greedy means that it will match as many repetitions as possible. "+" Matches 1 or more (greedy) repetitions of the preceding RE. "?" Matches 0 or 1 (greedy) of the preceding RE. *?,+?,?? Non-greedy versions of the previous three special characters. {m,n} Matches from m to n repetitions of the preceding RE. {m,n}? Non-greedy version of the above. "\\" Either escapes special characters or signals a special sequence. [] Indicates a set of characters. A "^" as the first character indicates a complementing set. "|" A|B, creates an RE that will match either A or B. (...) Matches the RE inside the parentheses. The contents can be retrieved or matched later in the string. (?aiLmsux) Set the A, I, L, M, S, U, or X flag for the RE (see below). (?:...) Non-grouping version of regular parentheses. (?P...) The substring matched by the group is accessible by name. (?P=name) Matches the text matched earlier by the group named name. (?#...) A comment; ignored. (?=...) Matches if ... matches next, but doesn't consume the string. (?!...) Matches if ... doesn't match next. (?<=...) 
Matches if preceded by ... (must be fixed length). (?= 0x02020000: __all__.append("finditer") def finditer(pattern, string, flags=0): """Return an iterator over all non-overlapping matches in the string. For each match, the iterator returns a match object. Empty matches are included in the result.""" return _compile(pattern, flags).finditer(string) def compile(pattern, flags=0): "Compile a regular expression pattern, returning a pattern object." return _compile(pattern, flags) def purge(): "Clear the regular expression caches" _cache.clear() _cache_repl.clear() def template(pattern, flags=0): "Compile a template pattern, returning a pattern object" return _compile(pattern, flags|T) _alphanum_str = frozenset( "_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890") _alphanum_bytes = frozenset( b"_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890") def escape(pattern): """ Escape all the characters in pattern except ASCII letters, numbers and '_'. """ if isinstance(pattern, str): alphanum = _alphanum_str s = list(pattern) for i, c in enumerate(pattern): if c not in alphanum: if c == "\000": s[i] = "\\000" else: s[i] = "\\" + c return "".join(s) else: alphanum = _alphanum_bytes s = [] esc = ord(b"\\") for c in pattern: if c in alphanum: s.append(c) else: if c == 0: s.extend(b"\\000") else: s.append(esc) s.append(c) return bytes(s) # -------------------------------------------------------------------- # internals _cache = {} _cache_repl = {} _pattern_type = type(sre_compile.compile("", 0)) _MAXCACHE = 512 def _compile(pattern, flags): # internal: compile pattern bypass_cache = flags & DEBUG if not bypass_cache: try: return _cache[type(pattern), pattern, flags] except KeyError: pass if isinstance(pattern, _pattern_type): if flags: raise ValueError( "Cannot process flags argument with a compiled pattern") return pattern if not sre_compile.isstring(pattern): raise TypeError("first argument must be string or compiled pattern") p = 
sre_compile.compile(pattern, flags) if not bypass_cache: if len(_cache) >= _MAXCACHE: _cache.clear() _cache[type(pattern), pattern, flags] = p return p def _compile_repl(repl, pattern): # internal: compile replacement pattern try: return _cache_repl[repl, pattern] except KeyError: pass p = sre_parse.parse_template(repl, pattern) if len(_cache_repl) >= _MAXCACHE: _cache_repl.clear() _cache_repl[repl, pattern] = p return p def _expand(pattern, match, template): # internal: match.expand implementation hook template = sre_parse.parse_template(template, pattern) return sre_parse.expand_template(template, match) def _subx(pattern, template): # internal: pattern.sub/subn implementation helper template = _compile_repl(template, pattern) if not template[0] and len(template[1]) == 1: # literal replacement return template[1][0] def filter(match, template=template): return sre_parse.expand_template(template, match) return filter # register myself for pickling import copyreg def _pickle(p): return _compile, (p.pattern, p.flags) copyreg.pickle(_pattern_type, _pickle, _compile) # -------------------------------------------------------------------- # experimental stuff (see python-dev discussions for details) class Scanner: def __init__(self, lexicon, flags=0): from sre_constants import BRANCH, SUBPATTERN self.lexicon = lexicon # combine phrases into a compound pattern p = [] s = sre_parse.Pattern() s.flags = flags for phrase, action in lexicon: p.append(sre_parse.SubPattern(s, [ (SUBPATTERN, (len(p)+1, sre_parse.parse(phrase, flags))), ])) s.groups = len(p)+1 p = sre_parse.SubPattern(s, [(BRANCH, (None, p))]) self.scanner = sre_compile.compile(p) def scan(self, string): result = [] append = result.append match = self.scanner.scanner(string).match i = 0 while 1: m = match() if not m: break j = m.end() if i == j: break action = self.lexicon[m.lastindex-1][1] if callable(action): self.match = m action = action(self, m.group()) if action is not None: append(action) i = j return 
result, string[i:] import sublime, sublime_plugin try: # python 3 from .functions import * from .openfunctions import * except ValueError: # python 2 from functions import * from openfunctions import * def plugin_loaded(): ENVIRON['PATH'] += str( sublime.load_settings("TeXPreview.sublime-settings").get("latex_path") ) print("Your path for TeXPrevew:", ENVIRON['PATH']) class LatexPreviewEvent(sublime_plugin.EventListener): def on_selection_modified_async(self, view): global workingFiles fileName = view.file_name() if not(fileName in workingFiles): return currentProperties = workingFiles[fileName] if (sublime.load_settings( "TeXPreview.sublime-settings" ).get("external_view") == False): sublime_open(view, currentProperties) return if ((currentProperties.runProc != None) and (currentProperties.runProc.poll() != None)): currentProperties.runProc = None currentProperties.isRun = False if ((os.path.exists(currentProperties.resFileName))): fileDelete(currentProperties.resFileName) return if (currentProperties.isRun == False): return auto_reload = sublime.load_settings("TeXPreview.sublime-settings").get("auto_reload") if (auto_reload == False): return if (auto_reload == "application_reload"): applicationReload(view, currentProperties) return changePic(view, currentProperties) #def on_load_async(self, view): # ENVIRON['PATH'] += sublime.load_settings("TeXPreview.sublime-settings").get("latex_path") def on_pre_close(self, view): fileName = view.file_name() stopPrevew(fileName) dirPath = os.path.dirname(view.file_name())+os.path.sep +r'TeX_Preview_tmp' if ((os.path.exists(dirPath))): try: os.rmdir(dirPath) except: pass class LatexPreviewCommand(sublime_plugin.TextCommand): def run(self, view): fileName = self.view.file_name() if (fileName == None): return if (fileName[-4:] != '.tex'): return global workingFiles if not(fileName in workingFiles): workingFiles[fileName] = FileProperties() currentProperties = workingFiles[fileName] currentProperties.code = None 
currentProperties.isRun = True currentProperties.cutFunction = lambda x:cutEquation(x) if (sublime.load_settings( "TeXPreview.sublime-settings" ).get("external_view") == False): sublime_open(self.view, currentProperties) else: applicationReload(self.view, currentProperties) class LatexBlockPreviewCommand(sublime_plugin.TextCommand): def run(self, view): fileName = self.view.file_name() if (fileName == None): return if (fileName[-4:] != '.tex'): return global workingFiles if not(fileName in workingFiles): workingFiles[fileName] = FileProperties() currentProperties = workingFiles[fileName] currentProperties.code = None currentProperties.isRun = True currentProperties.cutFunction = lambda x:cutBlock(x) if (sublime.load_settings( "TeXPreview.sublime-settings" ).get("external_view") == False): sublime_open(self.view, currentProperties) else: applicationReload(self.view, currentProperties) class LatexStopPreviewCommand(sublime_plugin.TextCommand): def run(self, view): fileName = self.view.file_name() stopPrevew(fileName) self.view.window().destroy_output_panel("tex_pr_exec") # Generated by Django 2.2.5 on 2019-09-12 13:36 import datetime from django.conf import settings from django.db import migrations, models import django.db.models.deletion import django.utils.timezone import olympia.amo.fields import olympia.amo.models import uuid class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('addons', '0001_initial'), ] operations = [ migrations.CreateModel( name='BlogPost', fields=[ ('created', models.DateTimeField(blank=True, default=django.utils.timezone.now, editable=False)), ('modified', models.DateTimeField(auto_now=True)), ('id', olympia.amo.fields.PositiveAutoField(primary_key=True, serialize=False)), ('title', models.CharField(max_length=255)), ('date_posted', models.DateField(default=datetime.datetime.now)), ('permalink', models.CharField(max_length=255)), ], options={ 'db_table': 
'blogposts', }, bases=(olympia.amo.models.SearchMixin, olympia.amo.models.SaveUpdateMixin, models.Model), ), migrations.CreateModel( name='RssKey', fields=[ ('id', olympia.amo.fields.PositiveAutoField(primary_key=True, serialize=False)), ('key', models.UUIDField(db_column='rsskey', default=uuid.uuid4, null=True, unique=True)), ('created', models.DateField(default=datetime.datetime.now)), ('addon', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='addons.Addon', unique=True)), ('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, unique=True)), ], options={ 'db_table': 'hubrsskeys', }, ), ] #!/usr/bin/python # -*- coding: utf-8 -*- # (c) 2014, Jakub Jirutka # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see . ANSIBLE_METADATA = {'metadata_version': '1.0', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: layman author: "Jakub Jirutka (@jirutka)" version_added: "1.6" short_description: Manage Gentoo overlays description: - Uses Layman to manage an additional repositories for the Portage package manager on Gentoo Linux. Please note that Layman must be installed on a managed node prior using this module. requirements: - "python >= 2.6" - layman python module options: name: description: - The overlay id to install, synchronize, or uninstall. 
Use 'ALL' to sync all of the installed overlays (can be used only when C(state=updated)). required: true list_url: description: - An URL of the alternative overlays list that defines the overlay to install. This list will be fetched and saved under C(${overlay_defs})/${name}.xml), where C(overlay_defs) is readed from the Layman's configuration. required: false state: description: - Whether to install (C(present)), sync (C(updated)), or uninstall (C(absent)) the overlay. required: false default: present choices: [present, absent, updated] validate_certs: description: - If C(no), SSL certificates will not be validated. This should only be set to C(no) when no other option exists. Prior to 1.9.3 the code defaulted to C(no). required: false default: 'yes' choices: ['yes', 'no'] version_added: '1.9.3' ''' EXAMPLES = ''' # Install the overlay 'mozilla' which is on the central overlays list. - layman: name: mozilla # Install the overlay 'cvut' from the specified alternative list. - layman: name: cvut list_url: 'http://raw.github.com/cvut/gentoo-overlay/master/overlay.xml' # Update (sync) the overlay 'cvut', or install if not installed yet. - layman: name: cvut list_url: 'http://raw.github.com/cvut/gentoo-overlay/master/overlay.xml' state: updated # Update (sync) all of the installed overlays. - layman: name: ALL state: updated # Uninstall the overlay 'cvut'. - layman: name: cvut state: absent ''' import shutil from os import path try: from layman.api import LaymanAPI from layman.config import BareConfig HAS_LAYMAN_API = True except ImportError: HAS_LAYMAN_API = False USERAGENT = 'ansible-httpget' class ModuleError(Exception): pass def init_layman(config=None): '''Returns the initialized ``LaymanAPI``. 
:param config: the layman's configuration to use (optional) ''' if config is None: config = BareConfig(read_configfile=True, quietness=1) return LaymanAPI(config) def download_url(module, url, dest): ''' :param url: the URL to download :param dest: the absolute path of where to save the downloaded content to; it must be writable and not a directory :raises ModuleError ''' # Hack to add params in the form that fetch_url expects module.params['http_agent'] = USERAGENT response, info = fetch_url(module, url) if info['status'] != 200: raise ModuleError("Failed to get %s: %s" % (url, info['msg'])) try: with open(dest, 'w') as f: shutil.copyfileobj(response, f) except IOError as e: raise ModuleError("Failed to write: %s" % str(e)) def install_overlay(module, name, list_url=None): '''Installs the overlay repository. If not on the central overlays list, then :list_url of an alternative list must be provided. The list will be fetched and saved under ``%(overlay_defs)/%(name.xml)`` (location of the ``overlay_defs`` is read from the Layman's configuration). :param name: the overlay id :param list_url: the URL of the remote repositories list to look for the overlay definition (optional, default: None) :returns: True if the overlay was installed, or False if already exists (i.e. nothing has changed) :raises ModuleError ''' # read Layman configuration layman_conf = BareConfig(read_configfile=True) layman = init_layman(layman_conf) if layman.is_installed(name): return False if module.check_mode: mymsg = 'Would add layman repo \'' + name + '\'' module.exit_json(changed=True, msg=mymsg) if not layman.is_repo(name): if not list_url: raise ModuleError("Overlay '%s' is not on the list of known " \ "overlays and URL of the remote list was not provided." 
% name) overlay_defs = layman_conf.get_option('overlay_defs') dest = path.join(overlay_defs, name + '.xml') download_url(module, list_url, dest) # reload config layman = init_layman() if not layman.add_repos(name): raise ModuleError(layman.get_errors()) return True def uninstall_overlay(module, name): '''Uninstalls the given overlay repository from the system. :param name: the overlay id to uninstall :returns: True if the overlay was uninstalled, or False if doesn't exist (i.e. nothing has changed) :raises ModuleError ''' layman = init_layman() if not layman.is_installed(name): return False if module.check_mode: mymsg = 'Would remove layman repo \'' + name + '\'' module.exit_json(changed=True, msg=mymsg) layman.delete_repos(name) if layman.get_errors(): raise ModuleError(layman.get_errors()) return True def sync_overlay(name): '''Synchronizes the specified overlay repository. :param name: the overlay repository id to sync :raises ModuleError ''' layman = init_layman() if not layman.sync(name): messages = [ str(item[1]) for item in layman.sync_results[2] ] raise ModuleError(messages) def sync_overlays(): '''Synchronize all of the installed overlays. 
:raises ModuleError ''' layman = init_layman() for name in layman.get_installed(): sync_overlay(name) def main(): # define module module = AnsibleModule( argument_spec = dict( name = dict(required=True), list_url = dict(aliases=['url']), state = dict(default="present", choices=['present', 'absent', 'updated']), validate_certs = dict(required=False, default=True, type='bool'), ), supports_check_mode=True ) if not HAS_LAYMAN_API: module.fail_json(msg='Layman is not installed') state, name, url = (module.params[key] for key in ['state', 'name', 'list_url']) changed = False try: if state == 'present': changed = install_overlay(module, name, url) elif state == 'updated': if name == 'ALL': sync_overlays() elif install_overlay(module, name, url): changed = True else: sync_overlay(name) else: changed = uninstall_overlay(module, name) except ModuleError as e: module.fail_json(msg=e.message) else: module.exit_json(changed=changed, name=name) # import module snippets from ansible.module_utils.basic import * from ansible.module_utils.urls import * if __name__ == '__main__': main() """ Test that variables with unsigned types display correctly. """ import lldb from lldbsuite.test.decorators import * from lldbsuite.test.lldbtest import * import lldbsuite.test.lldbutil as lldbutil class UnsignedTypesTestCase(TestBase): mydir = TestBase.compute_mydir(__file__) def test(self): """Test that variables with unsigned types display correctly.""" self.build() lldbutil.run_to_source_breakpoint(self, "// Set break point at this line", lldb.SBFileSpec("main.cpp")) # Test that unsigned types display correctly. 
self.expect( "frame variable --show-types --no-args", VARIABLES_DISPLAYED_CORRECTLY, patterns=["\((short unsigned int|unsigned short)\) the_unsigned_short = 99"], substrs=[ "(unsigned char) the_unsigned_char = 'c'", "(unsigned int) the_unsigned_int = 99", "(unsigned long) the_unsigned_long = 99", "(unsigned long long) the_unsigned_long_long = 99", "(uint32_t) the_uint32 = 99"]) # Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Various function for graph rerouting.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.contrib.graph_editor import subgraph from tensorflow.contrib.graph_editor import util from tensorflow.python.framework import ops as tf_ops def _check_ts_compatibility(ts0, ts1): """Make sure the shape and dtype of the two tensor's lists are compatible. Args: ts0: an object convertible to a list of tf.Tensor. ts1: an object convertible to a list of tf.Tensor. Raises: ValueError: if any pair of tensors (same index in ts0 and ts1) have a dtype or a shape which is not compatible. 
""" ts0 = util.make_list_of_t(ts0) ts1 = util.make_list_of_t(ts1) if len(ts0) != len(ts1): raise ValueError("ts0 and ts1 have different sizes: {} != {}".format( len(ts0), len(ts1))) for t0, t1 in zip(ts0, ts1): # check dtype dtype0, dtype1 = t0.dtype, t1.dtype if not dtype0.is_compatible_with(dtype1): raise ValueError("Dtypes {} and {} are not compatible.".format(dtype0, dtype1)) # check shape shape0, shape1 = t0.get_shape(), t1.get_shape() if not shape0.is_compatible_with(shape1): raise ValueError("Shapes {} and {} are not compatible.".format(shape0, shape1)) class _RerouteMode(object): """Enums for reroute's mode. swap: the end of tensors a and b are swapped. a2b: the end of the tensor a are also rerouted to the end of the tensor b (the end of b is left dangling). b2a: the end of the tensor b are also rerouted to the end of the tensor a (the end of a is left dangling). """ swap, a2b, b2a = range(3) @classmethod def check(cls, mode): """Check swap mode. Args: mode: an integer representing one of the modes. Returns: True if a is rerouted to b (mode is swap or a2b). True if b is rerouted to a (mode is swap or b2a). Raises: ValueError: if mode is outside the enum range. """ if mode == cls.swap: return True, True elif mode == cls.b2a: return False, True elif mode == cls.a2b: return True, False else: raise ValueError("Unknown _RerouteMode: {}".format(mode)) def _reroute_t(t0, t1, consumers1, can_modify=None, cannot_modify=None): """Reroute the end of the tensors (t0,t1). Warning: this function is directly manipulating the internals of the tf.Graph. Args: t0: a tf.Tensor. t1: a tf.Tensor. consumers1: The consumers of t1 which needs to be rerouted. can_modify: iterable of operations which can be modified. Any operation outside within_ops will be left untouched by this function. cannot_modify: iterable of operations which cannot be modified. Any operation within cannot_modify will be left untouched by this function. 
Returns: The number of individual modifications made by the function. """ nb_update_inputs = 0 if can_modify is not None: consumers1 &= can_modify if cannot_modify is not None: consumers1 -= cannot_modify consumers1_indices = {} for consumer1 in consumers1: consumers1_indices[consumer1] = [i for i, t in enumerate(consumer1.inputs) if t is t1] for consumer1 in consumers1: for i in consumers1_indices[consumer1]: consumer1._update_input(i, t0) # pylint: disable=protected-access nb_update_inputs += 1 return nb_update_inputs def _reroute_ts(ts0, ts1, mode, can_modify=None, cannot_modify=None): """Reroute the end of the tensors in each pair (t0,t1) in ts0 x ts1. This function is the back-bone of the Graph-Editor. It is essentially a thin wrapper on top of the tf.Operation._update_input. Given a pair of tensor t0, t1 in ts0 x ts1, this function re-route the end of t0 and t1 in three possible ways: 1) The reroute mode is "a<->b" or "b<->a": the tensors' end are swapped. After this operation, the previous consumers of t0 are now consumers of t1 and vice-versa. 2) The reroute mode is "a->b": the tensors' end of t0 are re-routed to the tensors's end of t1 (which are left dangling). After this operation, the previous consumers of t0 are still consuming t0 but the previous consumers of t1 are not also consuming t0. The tensor t1 has no consumer. 3) The reroute mode is "b->a": this mode is the symmetric of the "a->b" mode. Note that this function is re-routing the end of two tensors, not the start. Re-routing the start of two tensors is not supported by this library. The reason for that is the following: TensorFlow, by design, creates a strong bond between an op and its output tensor. This Graph editor follows this design and treats an operation A and its generating tensors {t_i} as an entity which cannot be broken. In other words, an op cannot be detached from any of its output tensors, ever. 
But it is possible to detach an op from its input tensors, which is what this function concerns itself with. Warning: this function is directly manipulating the internals of the tf.Graph. Args: ts0: an object convertible to a list of tf.Tensor. ts1: an object convertible to a list of tf.Tensor. mode: what to do with those tensors: "a->b" or "b<->a" for swaping and "a->b" or "b->a" for one direction re-routing. can_modify: iterable of operations which can be modified. Any operation outside within_ops will be left untouched by this function. cannot_modify: iterable of operations which cannot be modified. Any operation within cannot_modify will be left untouched by this function. Returns: The number of individual modifications made by the function. Raises: TypeError: if ts0 or ts1 cannot be converted to a list of tf.Tensor. TypeError: if can_modify or cannot_modify is not None and cannot be converted to a list of tf.Operation. """ a2b, b2a = _RerouteMode.check(mode) ts0 = util.make_list_of_t(ts0) ts1 = util.make_list_of_t(ts1) _check_ts_compatibility(ts0, ts1) if cannot_modify is not None: cannot_modify = frozenset(util.make_list_of_op(cannot_modify)) if can_modify is not None: can_modify = frozenset(util.make_list_of_op(can_modify)) nb_update_inputs = 0 for t0, t1 in zip(ts0, ts1): if t0 is t1: continue # Silently ignore identical tensors. consumers0 = set(t0.consumers()) consumers1 = set(t1.consumers()) if a2b: nb_update_inputs += _reroute_t(t0, t1, consumers1, can_modify, cannot_modify) if b2a: nb_update_inputs += _reroute_t(t1, t0, consumers0, can_modify, cannot_modify) return nb_update_inputs def swap_ts(ts0, ts1, can_modify=None, cannot_modify=None): """For each tensor's pair, swap the end of (t0,t1). B0 B1 B0 B1 | | => X A0 A1 A0 A1 Args: ts0: an object convertible to a list of tf.Tensor. ts1: an object convertible to a list of tf.Tensor. can_modify: iterable of operations which can be modified. 
  Any operation outside within_ops will be left untouched by this
      function.
    cannot_modify: iterable of operations which cannot be modified.
      Any operation within cannot_modify will be left untouched by this
      function.
  Returns:
    the number of individual modifications made by the function.
  Raises:
    TypeError: if ts0 or ts1 cannot be converted to a list of tf.Tensor.
    TypeError: if can_modify or cannot_modify is not None and cannot be
      converted to a list of tf.Operation.
  """
  return _reroute_ts(ts0, ts1, _RerouteMode.swap, can_modify, cannot_modify)


def reroute_a2b_ts(ts0, ts1, can_modify=None, cannot_modify=None):
  """For each tensor's pair, replace the end of t1 by the end of t0.

      B0 B1     B0 B1
      |  |  =>  |/
      A0 A1     A0 A1

  The end of the tensors in ts1 are left dangling.

  Args:
    ts0: an object convertible to a list of tf.Tensor.
    ts1: an object convertible to a list of tf.Tensor.
    can_modify: iterable of operations which can be modified. Any operation
      outside within_ops will be left untouched by this function.
    cannot_modify: iterable of operations which cannot be modified. Any
      operation within cannot_modify will be left untouched by this function.
  Returns:
    the number of individual modifications made by the function.
  Raises:
    TypeError: if ts0 or ts1 cannot be converted to a list of tf.Tensor.
    TypeError: if can_modify or cannot_modify is not None and cannot be
      converted to a list of tf.Operation.
  """
  return _reroute_ts(ts0, ts1, _RerouteMode.a2b, can_modify, cannot_modify)


def reroute_b2a_ts(ts0, ts1, can_modify=None, cannot_modify=None):
  r"""For each tensor's pair, replace the end of t0 by the end of t1.

      B0 B1     B0 B1
      |  |  =>  \|
      A0 A1     A0 A1

  The end of the tensors in ts0 are left dangling.

  Args:
    ts0: an object convertible to a list of tf.Tensor.
    ts1: an object convertible to a list of tf.Tensor.
    can_modify: iterable of operations which can be modified. Any operation
      outside within_ops will be left untouched by this function.
    cannot_modify: iterable of operations which cannot be modified.
      Any operation within cannot_modify will be left untouched by this
      function.
  Returns:
    the number of individual modifications made by the function.
  Raises:
    TypeError: if ts0 or ts1 cannot be converted to a list of tf.Tensor.
    TypeError: if can_modify or cannot_modify is not None and cannot be
      converted to a list of tf.Operation.
  """
  return _reroute_ts(ts0, ts1, _RerouteMode.b2a, can_modify, cannot_modify)


def _reroute_sgv_remap(sgv0, sgv1, mode):
  """Remap in place the inputs of two subgraph views to mimic the reroute.

  This function is meant to be used by reroute_inputs only.

  Args:
    sgv0: the first subgraph to have its inputs remapped.
    sgv1: the second subgraph to have its inputs remapped.
    mode: reroute mode, see _reroute_ts(...).
  Raises:
    TypeError: if svg0 or svg1 are not SubGraphView.
    ValueError: if sgv0 and sgv1 do not belong to the same graph.
  """
  a2b, b2a = _RerouteMode.check(mode)
  if not isinstance(sgv0, subgraph.SubGraphView):
    raise TypeError("Expected a SubGraphView, got {}".format(type(sgv0)))
  if not isinstance(sgv1, subgraph.SubGraphView):
    raise TypeError("Expected a SubGraphView, got {}".format(type(sgv1)))
  util.check_graphs(sgv0, sgv1)
  # Work on copies; the originals are only mutated at the very end via
  # _assign_from, so a failure part-way through leaves sgv0/sgv1 untouched.
  sgv0_ = sgv0.copy()
  sgv1_ = sgv1.copy()
  # pylint: disable=protected-access
  if a2b and b2a:
    # swap: exchange both input and passthrough tensor lists.
    (sgv0_._input_ts, sgv1_._input_ts) = (
        sgv1_._input_ts, sgv0_._input_ts)
    (sgv0_._passthrough_ts, sgv1_._passthrough_ts) = (
        sgv1_._passthrough_ts, sgv0_._passthrough_ts)
  elif a2b:
    sgv1_._input_ts = sgv0_._input_ts[:]
    sgv1_._passthrough_ts = sgv0_._passthrough_ts[:]
  elif b2a:
    sgv0_._input_ts = sgv1_._input_ts[:]
    sgv0_._passthrough_ts = sgv1_._passthrough_ts[:]
  # pylint: enable=protected-access

  # Update the passthrough outputs as well.
  def update_passthrough_outputs(a, b):
    # For every output of b that is a passthrough tensor of a, substitute
    # b's input at the corresponding passthrough position.
    # pylint: disable=protected-access
    for i, t in enumerate(b._output_ts):
      if t in a._passthrough_ts:
        ii = a._input_ts.index(t)
        b._output_ts[i] = b._input_ts[ii]
    # pylint: enable=protected-access

  if a2b:
    update_passthrough_outputs(sgv0_, sgv1_)
  if b2a:
    update_passthrough_outputs(sgv1_, sgv0_)

  # in-place
  # pylint: disable=protected-access
  sgv0._assign_from(sgv0_)
  sgv1._assign_from(sgv1_)
  # pylint: enable=protected-access


def _reroute_sgv_inputs(sgv0, sgv1, mode):
  """Re-route all the inputs of two subgraphs.

  Args:
    sgv0: the first subgraph to have its inputs swapped. This argument is
      converted to a subgraph using the same rules than the function
      subgraph.make_view.
    sgv1: the second subgraph to have its inputs swapped. This argument is
      converted to a subgraph using the same rules than the function
      subgraph.make_view.
    mode: reroute mode, see _reroute_ts(...).
  Returns:
    Two new subgraph views with their inputs swapped.
      Note that sgv0 and sgv1 are also modified in place.
  Raises:
    StandardError: if sgv0 or sgv1 cannot be converted to a SubGraphView using
      the same rules than the function subgraph.make_view.
  """
  sgv0 = subgraph.make_view(sgv0)
  sgv1 = subgraph.make_view(sgv1)
  util.check_graphs(sgv0, sgv1)
  can_modify = sgv0.ops + sgv1.ops
  # also allow consumers of passthrough to be modified:
  can_modify += util.get_consuming_ops(sgv0.passthroughs)
  can_modify += util.get_consuming_ops(sgv1.passthroughs)
  _reroute_ts(sgv0.inputs, sgv1.inputs, mode, can_modify=can_modify)
  _reroute_sgv_remap(sgv0, sgv1, mode)
  return sgv0, sgv1


def _reroute_sgv_outputs(sgv0, sgv1, mode):
  """Re-route all the outputs of two operations.

  Args:
    sgv0: the first subgraph to have its outputs swapped. This argument is
      converted to a subgraph using the same rules than the function
      subgraph.make_view.
    sgv1: the second subgraph to have its outputs swapped. This argument is
      converted to a subgraph using the same rules than the function
      subgraph.make_view.
    mode: reroute mode, see _reroute_ts(...).
  Returns:
    Two new subgraph views with their outputs swapped.
      Note that sgv0 and sgv1 are also modified in place.
  Raises:
    StandardError: if sgv0 or sgv1 cannot be converted to a SubGraphView using
      the same rules than the function subgraph.make_view.
  """
  sgv0 = subgraph.make_view(sgv0)
  sgv1 = subgraph.make_view(sgv1)
  util.check_graphs(sgv0, sgv1)
  # The ops of both views must keep their own wiring intact while their
  # output ends are rerouted.
  cannot_modify = sgv0.ops + sgv1.ops
  _reroute_ts(sgv0.outputs, sgv1.outputs, mode, cannot_modify=cannot_modify)
  return sgv0, sgv1


def _reroute_sgv(sgv0, sgv1, mode):
  """Re-route both the inputs and the outputs of the two subgraph views.

  This involves swapping all the inputs/outputs of the two subgraph views.

  Args:
    sgv0: the first subgraph to be swapped. This argument is converted to a
      subgraph using the same rules than the function subgraph.make_view.
    sgv1: the second subgraph to be swapped. This argument is converted to a
      subgraph using the same rules than the function subgraph.make_view.
    mode: reroute mode, see _reroute_ts(...).
  Returns:
    Two new subgraph views with their outputs and inputs swapped.
      Note that sgv0 and sgv1 are also modified in place.
  Raises:
    StandardError: if sgv0 or sgv1 cannot be converted to a SubGraphView using
      the same rules than the function subgraph.make_view.
  """
  _reroute_sgv_outputs(sgv0, sgv1, mode)
  _reroute_sgv_inputs(sgv0, sgv1, mode)
  return sgv0, sgv1


def swap_inputs(sgv0, sgv1):
  """Swap all the inputs of sgv0 and sgv1 (see reroute_inputs)."""
  return _reroute_sgv_inputs(sgv0, sgv1, _RerouteMode.swap)


def reroute_a2b_inputs(sgv0, sgv1):
  """Re-route all the inputs of sgv0 to sgv1 (see reroute_inputs)."""
  return _reroute_sgv_inputs(sgv0, sgv1, _RerouteMode.a2b)


def reroute_b2a_inputs(sgv0, sgv1):
  """Re-route all the inputs of sgv1 to sgv0 (see reroute_inputs)."""
  return _reroute_sgv_inputs(sgv0, sgv1, _RerouteMode.b2a)


def swap_outputs(sgv0, sgv1):
  """Swap all the outputs of sgv0 and sgv1 (see _reroute_outputs)."""
  return _reroute_sgv_outputs(sgv0, sgv1, _RerouteMode.swap)


def reroute_a2b_outputs(sgv0, sgv1):
  """Re-route all the outputs of sgv0 to sgv1 (see _reroute_outputs)."""
  return _reroute_sgv_outputs(sgv0, sgv1, _RerouteMode.a2b)


def reroute_b2a_outputs(sgv0, sgv1):
  """Re-route all the outputs of sgv1 to sgv0 (see _reroute_outputs)."""
  return _reroute_sgv_outputs(sgv0, sgv1, _RerouteMode.b2a)


def swap(sgv0, sgv1):
  """Swap the inputs and outputs of sgv1 to sgv0 (see _reroute)."""
  return _reroute_sgv(sgv0, sgv1, _RerouteMode.swap)


def reroute_a2b(sgv0, sgv1):
  """Re-route the inputs and outputs of sgv0 to sgv1 (see _reroute)."""
  return _reroute_sgv(sgv0, sgv1, _RerouteMode.a2b)


def reroute_b2a(sgv0, sgv1):
  """Re-route the inputs and outputs of sgv1 to sgv0 (see _reroute)."""
  return _reroute_sgv(sgv0, sgv1, _RerouteMode.b2a)


def remove_control_inputs(op, cops):
  """Remove the control inputs cops from co.

  Warning: this function is directly manipulating the internals of the
  tf.Graph.

  Args:
    op: a tf.Operation from which to remove the control inputs.
    cops: an object convertible to a list of tf.Operation.
  Raises:
    TypeError: if op is not a tf.Operation
    ValueError: if any cop in cops is not a control input of op.
""" if not isinstance(op, tf_ops.Operation): raise TypeError("Expected a tf.Operation, got: {}", type(op)) cops = util.make_list_of_op(cops, allow_graph=False) for cop in cops: if cop not in op.control_inputs: raise ValueError("{} is not a control_input of {}".format(op.name, cop.name)) # pylint: disable=protected-access op._control_inputs = [cop for cop in op._control_inputs if cop not in cops] op._recompute_node_def() # pylint: enable=protected-access def add_control_inputs(op, cops): """Add the control inputs cops to co. Warning: this function is directly manipulating the internals of the tf.Graph. Args: op: a tf.Operation to which the control inputs are added. cops: an object convertible to a list of tf.Operation. Raises: TypeError: if op is not a tf.Operation ValueError: if any cop in cops is already a control input of op. """ if not isinstance(op, tf_ops.Operation): raise TypeError("Expected a tf.Operation, got: {}", type(op)) cops = util.make_list_of_op(cops, allow_graph=False) for cop in cops: if cop in op.control_inputs: raise ValueError("{} is already a control_input of {}".format(op.name, cop.name)) # pylint: disable=protected-access op._control_inputs += cops op._recompute_node_def() # pylint: enable=protected-access from io import StringIO import unittest from ..processor import process_render class ProcessorTest(unittest.TestCase): maxDiff = None def test_basics(self): html = """ Hello

    Science was done

    """, ) self.assertEqual(output["links"], '\n') self.assertEqual(output["styles"], "\n") self.assertEqual(output["scripts"], "\n") self.assertEqual(output["abstract"], "Science was done") self.assertEqual(output["first_image"], "prefix/first_image.gif") def test_arxiv_urls_are_converted_to_vanity_urls(self): html = 'Something' output = process_render(StringIO(html), "", {}) self.assertEqual( output["body"], 'Something', ) def test_emails_are_removed(self): html = 'some email link another@email.com' output = process_render(StringIO(html), "", {}) self.assertEqual( output["body"], "some email link ", ) # encoding=utf-8 '''Key-value pairs.''' import collections import gettext import io import textwrap from wpull.collections import OrderedDefaultDict _ = gettext.gettext class NameValueRecord(collections.MutableMapping): '''An ordered mapping of name-value pairs. Duplicated names are accepted. .. seealso:: http://tools.ietf.org/search/draft-kunze-anvl-02 ''' def __init__(self, normalize_overrides=None, encoding='utf-8', wrap_width=None): self._map = OrderedDefaultDict(list) self.raw = None self.encoding = encoding self._normalize_overrides = normalize_overrides self._wrap_width = wrap_width def parse(self, string, strict=True): '''Parse the string or bytes. Args: strict (bool): If True, errors will not be ignored Raises: :class:`ValueError` if the record is malformed. 
''' if isinstance(string, bytes): errors = 'strict' if strict else 'replace' string = string.decode(self.encoding, errors=errors) if not self.raw: self.raw = string else: self.raw += string lines = unfold_lines(string).splitlines() for line in lines: if line: if ':' not in line: if strict: raise ValueError('Field missing colon.') else: continue name, value = line.split(':', 1) name = name.strip() value = value.strip() self.add(name, value) def __getitem__(self, name): normalized_name = normalize_name(name, self._normalize_overrides) if normalized_name in self._map: if self._map[normalized_name]: return self._map[normalized_name][0] raise KeyError(name) def __setitem__(self, name, value): normalized_name = normalize_name(name, self._normalize_overrides) self._map[normalized_name][:] = (value,) def __delitem__(self, name): del self._map[normalize_name(name, self._normalize_overrides)] def __iter__(self): return iter(self._map) def __len__(self): return len(self._map) def add(self, name, value): '''Append the name-value pair to the record.''' normalized_name = normalize_name(name, self._normalize_overrides) self._map[normalized_name].append(value) def get_list(self, name): '''Return all the values for given name.''' normalized_name = normalize_name(name, self._normalize_overrides) return self._map[normalized_name] def get_all(self): '''Return an iterator of name-value pairs.''' for name, values in self._map.items(): for value in values: yield (name, value) def __str__(self): return self.to_str() def to_str(self): '''Convert to string.''' pairs = [] for name, value in self.get_all(): if value and self._wrap_width: pairs.append('{0}:{1}'.format( name, '\r\n'.join(textwrap.wrap( value, width=self._wrap_width, drop_whitespace=False, initial_indent=' ', subsequent_indent=' ' )) )) elif value: pairs.append('{0}: {1}'.format(name, value)) else: pairs.append('{0}:'.format(name)) pairs.append('') return '\r\n'.join(pairs) def __bytes__(self): return self.to_bytes() def 
to_bytes(self, errors='strict'): '''Convert to bytes.''' return str(self).encode(self.encoding, errors=errors) def normalize_name(name, overrides=None): '''Normalize the key name to title case. For example, ``normalize_name('content-id')`` will become ``Content-Id`` Args: name (str): The name to normalize. overrides (set, sequence): A set or sequence containing keys that should be cased to themselves. For example, passing ``set('WARC-Type')`` will normalize any key named "warc-type" to ``WARC-Type`` instead of the default ``Warc-Type``. Returns: str ''' normalized_name = name.title() if overrides: override_map = dict([(name.title(), name) for name in overrides]) return override_map.get(normalized_name, normalized_name) else: return normalized_name def guess_line_ending(string): '''Return the most likely line delimiter from the string.''' assert isinstance(string, str), 'Expect str. Got {}'.format(type(string)) crlf_count = string.count('\r\n') lf_count = string.count('\n') if crlf_count >= lf_count: return '\r\n' else: return '\n' def unfold_lines(string): '''Join lines that are wrapped. Any line that starts with a space or tab is joined to the previous line. ''' assert isinstance(string, str), 'Expect str. 
Got {}'.format(type(string)) lines = string.splitlines() line_buffer = io.StringIO() for line_number in range(len(lines)): line = lines[line_number] if line and line[0:1] in (' ', '\t'): line_buffer.write(' ') elif line_number != 0: line_buffer.write('\r\n') line_buffer.write(line.strip()) line_buffer.write('\r\n') return line_buffer.getvalue() from __future__ import unicode_literals import re from .common import InfoExtractor from ..utils import ExtractorError class TestURLIE(InfoExtractor): """ Allows addressing of the test cases as test:yout.*be_1 """ IE_DESC = False # Do not list _VALID_URL = r'test(?:url)?:(?P(?P.+?)(?:_(?P[0-9]+))?)$' def _real_extract(self, url): from ..extractor import gen_extractors mobj = re.match(self._VALID_URL, url) video_id = mobj.group('id') extractor_id = mobj.group('extractor') all_extractors = gen_extractors() rex = re.compile(extractor_id, flags=re.IGNORECASE) matching_extractors = [ e for e in all_extractors if rex.search(e.IE_NAME)] if len(matching_extractors) == 0: raise ExtractorError( 'No extractors matching %r found' % extractor_id, expected=True) elif len(matching_extractors) > 1: # Is it obvious which one to pick? 
            try:
                # Prefer an exact (case-insensitive) IE_NAME match over a
                # mere regex hit.
                extractor = next(
                    ie
                    for ie in matching_extractors
                    if ie.IE_NAME.lower() == extractor_id.lower())
            except StopIteration:
                raise ExtractorError(
                    ('Found multiple matching extractors: %s' %
                        ' '.join(ie.IE_NAME for ie in matching_extractors)),
                    expected=True)
        else:
            extractor = matching_extractors[0]

        # Optional trailing "_<num>" selects which test case; default 0.
        num_str = mobj.group('num')
        num = int(num_str) if num_str else 0

        testcases = []
        t = getattr(extractor, '_TEST', None)
        if t:
            testcases.append(t)
        testcases.extend(getattr(extractor, '_TESTS', []))

        try:
            tc = testcases[num]
        except IndexError:
            raise ExtractorError(
                ('Test case %d not found, got only %d tests' %
                    (num, len(testcases))),
                expected=True)

        self.to_screen('Test URL: %s' % tc['url'])
        return {
            '_type': 'url',
            'url': tc['url'],
            'id': video_id,
        }


# event/registry.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
#
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Provides managed registration services on behalf of :func:`.listen`
arguments.

By "managed registration", we mean that event listening functions and
other objects can be added to various collections in such a way that their
membership in all those collections can be revoked at once, based on an
equivalent :class:`._EventKey`.

"""

from __future__ import absolute_import

import weakref
import collections
import types
from .. import exc, util


_key_to_collection = collections.defaultdict(dict)
"""
Given an original listen() argument, can locate all
listener collections and the listener fn contained

(target, identifier, fn) -> {
                            ref(listenercollection) -> ref(listener_fn)
                            ref(listenercollection) -> ref(listener_fn)
                            ref(listenercollection) -> ref(listener_fn)
                        }
"""

_collection_to_key = collections.defaultdict(dict)
"""
Given a _ListenerCollection or _ClsLevelListener, can locate
all the original listen() arguments and the listener fn contained

ref(listenercollection) -> {
                            ref(listener_fn) -> (target, identifier, fn),
                            ref(listener_fn) -> (target, identifier, fn),
                            ref(listener_fn) -> (target, identifier, fn),
                        }
"""


def _collection_gced(ref):
    """Weakref callback: purge a garbage-collected collection from both
    registries."""
    # defaultdict, so can't get a KeyError
    if not _collection_to_key or ref not in _collection_to_key:
        return
    listener_to_key = _collection_to_key.pop(ref)
    for key in listener_to_key.values():
        if key in _key_to_collection:
            # defaultdict, so can't get a KeyError
            dispatch_reg = _key_to_collection[key]
            dispatch_reg.pop(ref)
            # Drop the key entirely once no collection holds the listener.
            if not dispatch_reg:
                _key_to_collection.pop(key)


def _stored_in_collection(event_key, owner):
    """Record that event_key's listener fn is held by owner's collection.

    Returns False (a no-op) when the owner already holds it.
    """
    key = event_key._key

    dispatch_reg = _key_to_collection[key]

    owner_ref = owner.ref
    listen_ref = weakref.ref(event_key._listen_fn)

    if owner_ref in dispatch_reg:
        return False

    dispatch_reg[owner_ref] = listen_ref

    listener_to_key = _collection_to_key[owner_ref]
    listener_to_key[listen_ref] = key

    return True


def _removed_from_collection(event_key, owner):
    """Remove the bookkeeping for event_key's listener fn from owner."""
    key = event_key._key

    dispatch_reg = _key_to_collection[key]

    listen_ref = weakref.ref(event_key._listen_fn)

    owner_ref = owner.ref
    dispatch_reg.pop(owner_ref, None)
    if not dispatch_reg:
        del _key_to_collection[key]

    if owner_ref in _collection_to_key:
        listener_to_key = _collection_to_key[owner_ref]
        listener_to_key.pop(listen_ref)


def _stored_in_collection_multi(newowner, oldowner, elements):
    """Copy the registrations of `elements` from oldowner to newowner."""
    if not elements:
        return

    oldowner = oldowner.ref
    newowner = newowner.ref

    old_listener_to_key =
_collection_to_key[oldowner]
    new_listener_to_key = _collection_to_key[newowner]

    for listen_fn in elements:
        listen_ref = weakref.ref(listen_fn)
        key = old_listener_to_key[listen_ref]
        dispatch_reg = _key_to_collection[key]
        if newowner in dispatch_reg:
            assert dispatch_reg[newowner] == listen_ref
        else:
            dispatch_reg[newowner] = listen_ref

        new_listener_to_key[listen_ref] = key


def _clear(owner, elements):
    """Drop the registrations of `elements` held by owner from both
    registries."""
    if not elements:
        return

    owner = owner.ref
    listener_to_key = _collection_to_key[owner]
    for listen_fn in elements:
        listen_ref = weakref.ref(listen_fn)
        key = listener_to_key[listen_ref]
        dispatch_reg = _key_to_collection[key]
        dispatch_reg.pop(owner, None)

        if not dispatch_reg:
            del _key_to_collection[key]


class _EventKey(object):
    """Represent :func:`.listen` arguments.
    """

    __slots__ = (
        'target', 'identifier', 'fn', 'fn_key', 'fn_wrap', 'dispatch_target'
    )

    def __init__(self, target, identifier,
                 fn, dispatch_target, _fn_wrap=None):
        self.target = target
        self.identifier = identifier
        self.fn = fn
        # Bound methods are keyed on (func, instance) ids so that two bound
        # methods over the same function/instance hash/compare the same.
        if isinstance(fn, types.MethodType):
            self.fn_key = id(fn.__func__), id(fn.__self__)
        else:
            self.fn_key = id(fn)
        self.fn_wrap = _fn_wrap
        self.dispatch_target = dispatch_target

    @property
    def _key(self):
        # Identity-based key into _key_to_collection.
        return (id(self.target), self.identifier, self.fn_key)

    def with_wrapper(self, fn_wrap):
        # Returns self unchanged when the wrapper is already in effect.
        if fn_wrap is self._listen_fn:
            return self
        else:
            return _EventKey(
                self.target,
                self.identifier,
                self.fn,
                self.dispatch_target,
                _fn_wrap=fn_wrap
            )

    def with_dispatch_target(self, dispatch_target):
        if dispatch_target is self.dispatch_target:
            return self
        else:
            return _EventKey(
                self.target,
                self.identifier,
                self.fn,
                dispatch_target,
                _fn_wrap=self.fn_wrap
            )

    def listen(self, *args, **kw):
        once = kw.pop("once", False)
        named = kw.pop("named", False)
        target, identifier, fn = \
            self.dispatch_target, self.identifier, self._listen_fn

        dispatch_collection = getattr(target.dispatch, identifier)

        adjusted_fn = dispatch_collection._adjust_fn_spec(fn, named)

        self = self.with_wrapper(adjusted_fn)

        if once:
            self.with_wrapper(
                util.only_once(self._listen_fn)).listen(*args, **kw)
        else:
            self.dispatch_target.dispatch._listen(self, *args, **kw)

    def remove(self):
        key = self._key

        if key not in _key_to_collection:
            raise exc.InvalidRequestError(
                "No listeners found for event %s / %r / %s " %
                (self.target, self.identifier, self.fn)
            )
        dispatch_reg = _key_to_collection.pop(key)

        for collection_ref, listener_ref in dispatch_reg.items():
            collection = collection_ref()
            listener_fn = listener_ref()
            # Both weakrefs must still be alive for removal to apply.
            if collection is not None and listener_fn is not None:
                collection.remove(self.with_wrapper(listener_fn))

    def contains(self):
        """Return True if this event key is registered to listen.
        """
        return self._key in _key_to_collection

    def base_listen(self, propagate=False, insert=False,
                    named=False):
        target, identifier, fn = \
            self.dispatch_target, self.identifier, self._listen_fn

        dispatch_collection = getattr(target.dispatch, identifier)

        if insert:
            dispatch_collection.\
                for_modify(target.dispatch).insert(self, propagate)
        else:
            dispatch_collection.\
                for_modify(target.dispatch).append(self, propagate)

    @property
    def _listen_fn(self):
        # The wrapped fn takes precedence when a wrapper was installed.
        return self.fn_wrap or self.fn

    def append_to_list(self, owner, list_):
        if _stored_in_collection(self, owner):
            list_.append(self._listen_fn)
            return True
        else:
            return False

    def remove_from_list(self, owner, list_):
        _removed_from_collection(self, owner)
        list_.remove(self._listen_fn)

    def prepend_to_list(self, owner, list_):
        if _stored_in_collection(self, owner):
            list_.appendleft(self._listen_fn)
            return True
        else:
            return False


"""Tests for events.py."""

import functools
import gc
import io
import os
import platform
import re
import signal
import socket
try:
    import ssl
except ImportError:
    ssl = None
    HAS_SNI = False
else:
    from ssl import HAS_SNI
import subprocess
import sys
import threading
import time
import errno
import unittest
from unittest import mock
import weakref

from test import support  # find_unused_port, IPV6_ENABLED, TEST_HOME_DIR
import asyncio
from asyncio import proactor_events
from
asyncio import selector_events
from asyncio import test_utils


def data_file(filename):
    """Locate a test data file, first under the stdlib test home dir,
    then next to this file; raise FileNotFoundError otherwise."""
    if hasattr(support, 'TEST_HOME_DIR'):
        fullname = os.path.join(support.TEST_HOME_DIR, filename)
        if os.path.isfile(fullname):
            return fullname
    fullname = os.path.join(os.path.dirname(__file__), filename)
    if os.path.isfile(fullname):
        return fullname
    raise FileNotFoundError(filename)


def osx_tiger():
    """Return True if the platform is Mac OS 10.4 or older."""
    if sys.platform != 'darwin':
        return False
    version = platform.mac_ver()[0]
    version = tuple(map(int, version.split('.')))
    return version < (10, 5)


ONLYCERT = data_file('ssl_cert.pem')
ONLYKEY = data_file('ssl_key.pem')
SIGNED_CERTFILE = data_file('keycert3.pem')
SIGNING_CA = data_file('pycacert.pem')


class MyBaseProto(asyncio.Protocol):
    # Protocol that tracks its lifecycle via `state` and asserts the
    # INITIAL -> CONNECTED -> (EOF) -> CLOSED ordering.
    connected = None
    done = None

    def __init__(self, loop=None):
        self.transport = None
        self.state = 'INITIAL'
        self.nbytes = 0
        if loop is not None:
            self.connected = asyncio.Future(loop=loop)
            self.done = asyncio.Future(loop=loop)

    def connection_made(self, transport):
        self.transport = transport
        assert self.state == 'INITIAL', self.state
        self.state = 'CONNECTED'
        if self.connected:
            self.connected.set_result(None)

    def data_received(self, data):
        assert self.state == 'CONNECTED', self.state
        self.nbytes += len(data)

    def eof_received(self):
        assert self.state == 'CONNECTED', self.state
        self.state = 'EOF'

    def connection_lost(self, exc):
        assert self.state in ('CONNECTED', 'EOF'), self.state
        self.state = 'CLOSED'
        if self.done:
            self.done.set_result(None)


class MyProto(MyBaseProto):
    # Same as MyBaseProto but issues an HTTP request on connect.
    def connection_made(self, transport):
        super().connection_made(transport)
        transport.write(b'GET / HTTP/1.0\r\nHost: example.com\r\n\r\n')


class MyDatagramProto(asyncio.DatagramProtocol):
    done = None

    def __init__(self, loop=None):
        self.state = 'INITIAL'
        self.nbytes = 0
        if loop is not None:
            self.done = asyncio.Future(loop=loop)

    def connection_made(self, transport):
        self.transport = transport
        assert self.state == 'INITIAL', self.state
        self.state = 'INITIALIZED'

    def datagram_received(self, data, addr):
        assert self.state == 'INITIALIZED', self.state
        self.nbytes += len(data)

    def error_received(self, exc):
        assert self.state == 'INITIALIZED', self.state

    def connection_lost(self, exc):
        assert self.state == 'INITIALIZED', self.state
        self.state = 'CLOSED'
        if self.done:
            self.done.set_result(None)


class MyReadPipeProto(asyncio.Protocol):
    # Uses a list for `state` so the full transition history is preserved.
    done = None

    def __init__(self, loop=None):
        self.state = ['INITIAL']
        self.nbytes = 0
        self.transport = None
        if loop is not None:
            self.done = asyncio.Future(loop=loop)

    def connection_made(self, transport):
        self.transport = transport
        assert self.state == ['INITIAL'], self.state
        self.state.append('CONNECTED')

    def data_received(self, data):
        assert self.state == ['INITIAL', 'CONNECTED'], self.state
        self.nbytes += len(data)

    def eof_received(self):
        assert self.state == ['INITIAL', 'CONNECTED'], self.state
        self.state.append('EOF')

    def connection_lost(self, exc):
        if 'EOF' not in self.state:
            self.state.append('EOF')  # It is okay if EOF is missed.
        assert self.state == ['INITIAL', 'CONNECTED', 'EOF'], self.state
        self.state.append('CLOSED')
        if self.done:
            self.done.set_result(None)


class MyWritePipeProto(asyncio.BaseProtocol):
    done = None

    def __init__(self, loop=None):
        self.state = 'INITIAL'
        self.transport = None
        if loop is not None:
            self.done = asyncio.Future(loop=loop)

    def connection_made(self, transport):
        self.transport = transport
        assert self.state == 'INITIAL', self.state
        self.state = 'CONNECTED'

    def connection_lost(self, exc):
        assert self.state == 'CONNECTED', self.state
        self.state = 'CLOSED'
        if self.done:
            self.done.set_result(None)


class MySubprocessProtocol(asyncio.SubprocessProtocol):

    def __init__(self, loop):
        self.state = 'INITIAL'
        self.transport = None
        self.connected = asyncio.Future(loop=loop)
        self.completed = asyncio.Future(loop=loop)
        # One future per standard fd (0, 1, 2).
        self.disconnects = {fd: asyncio.Future(loop=loop) for fd in range(3)}
        # Captured stdout (1) and stderr (2) bytes.
        self.data = {1: b'', 2: b''}
        self.returncode = None
        self.got_data = {1: asyncio.Event(loop=loop),
                         2: asyncio.Event(loop=loop)}

    def connection_made(self, transport):
        self.transport = transport
        assert self.state == 'INITIAL', self.state
        self.state = 'CONNECTED'
        self.connected.set_result(None)

    def connection_lost(self, exc):
        assert self.state == 'CONNECTED', self.state
        self.state = 'CLOSED'
        self.completed.set_result(None)

    def pipe_data_received(self, fd, data):
        assert self.state == 'CONNECTED', self.state
        self.data[fd] += data
        self.got_data[fd].set()

    def pipe_connection_lost(self, fd, exc):
        assert self.state == 'CONNECTED', self.state
        if exc:
            self.disconnects[fd].set_exception(exc)
        else:
            self.disconnects[fd].set_result(exc)

    def process_exited(self):
        assert self.state == 'CONNECTED', self.state
        self.returncode = self.transport.get_returncode()


class EventLoopTestsMixin:

    def setUp(self):
        super().setUp()
        self.loop = self.create_event_loop()
        self.set_event_loop(self.loop)

    def tearDown(self):
        # just in case if we have transport close callbacks
        test_utils.run_briefly(self.loop)

        self.loop.close()
        gc.collect()
super().tearDown() def test_run_until_complete_nesting(self): @asyncio.coroutine def coro1(): yield @asyncio.coroutine def coro2(): self.assertTrue(self.loop.is_running()) self.loop.run_until_complete(coro1()) self.assertRaises( RuntimeError, self.loop.run_until_complete, coro2()) # Note: because of the default Windows timing granularity of # 15.6 msec, we use fairly long sleep times here (~100 msec). def test_run_until_complete(self): t0 = self.loop.time() self.loop.run_until_complete(asyncio.sleep(0.1, loop=self.loop)) t1 = self.loop.time() self.assertTrue(0.08 <= t1-t0 <= 0.8, t1-t0) def test_run_until_complete_stopped(self): @asyncio.coroutine def cb(): self.loop.stop() yield from asyncio.sleep(0.1, loop=self.loop) task = cb() self.assertRaises(RuntimeError, self.loop.run_until_complete, task) def test_call_later(self): results = [] def callback(arg): results.append(arg) self.loop.stop() self.loop.call_later(0.1, callback, 'hello world') t0 = time.monotonic() self.loop.run_forever() t1 = time.monotonic() self.assertEqual(results, ['hello world']) self.assertTrue(0.08 <= t1-t0 <= 0.8, t1-t0) def test_call_soon(self): results = [] def callback(arg1, arg2): results.append((arg1, arg2)) self.loop.stop() self.loop.call_soon(callback, 'hello', 'world') self.loop.run_forever() self.assertEqual(results, [('hello', 'world')]) def test_call_soon_threadsafe(self): results = [] lock = threading.Lock() def callback(arg): results.append(arg) if len(results) >= 2: self.loop.stop() def run_in_thread(): self.loop.call_soon_threadsafe(callback, 'hello') lock.release() lock.acquire() t = threading.Thread(target=run_in_thread) t.start() with lock: self.loop.call_soon(callback, 'world') self.loop.run_forever() t.join() self.assertEqual(results, ['hello', 'world']) def test_call_soon_threadsafe_same_thread(self): results = [] def callback(arg): results.append(arg) if len(results) >= 2: self.loop.stop() self.loop.call_soon_threadsafe(callback, 'hello') self.loop.call_soon(callback, 
'world') self.loop.run_forever() self.assertEqual(results, ['hello', 'world']) def test_run_in_executor(self): def run(arg): return (arg, threading.get_ident()) f2 = self.loop.run_in_executor(None, run, 'yo') res, thread_id = self.loop.run_until_complete(f2) self.assertEqual(res, 'yo') self.assertNotEqual(thread_id, threading.get_ident()) def test_reader_callback(self): r, w = test_utils.socketpair() r.setblocking(False) bytes_read = bytearray() def reader(): try: data = r.recv(1024) except BlockingIOError: # Spurious readiness notifications are possible # at least on Linux -- see man select. return if data: bytes_read.extend(data) else: self.assertTrue(self.loop.remove_reader(r.fileno())) r.close() self.loop.add_reader(r.fileno(), reader) self.loop.call_soon(w.send, b'abc') test_utils.run_until(self.loop, lambda: len(bytes_read) >= 3) self.loop.call_soon(w.send, b'def') test_utils.run_until(self.loop, lambda: len(bytes_read) >= 6) self.loop.call_soon(w.close) self.loop.call_soon(self.loop.stop) self.loop.run_forever() self.assertEqual(bytes_read, b'abcdef') def test_writer_callback(self): r, w = test_utils.socketpair() w.setblocking(False) def writer(data): w.send(data) self.loop.stop() data = b'x' * 1024 self.loop.add_writer(w.fileno(), writer, data) self.loop.run_forever() self.assertTrue(self.loop.remove_writer(w.fileno())) self.assertFalse(self.loop.remove_writer(w.fileno())) w.close() read = r.recv(len(data) * 2) r.close() self.assertEqual(read, data) def _basetest_sock_client_ops(self, httpd, sock): if not isinstance(self.loop, proactor_events.BaseProactorEventLoop): # in debug mode, socket operations must fail # if the socket is not in blocking mode self.loop.set_debug(True) sock.setblocking(True) with self.assertRaises(ValueError): self.loop.run_until_complete( self.loop.sock_connect(sock, httpd.address)) with self.assertRaises(ValueError): self.loop.run_until_complete( self.loop.sock_sendall(sock, b'GET / HTTP/1.0\r\n\r\n')) with 
self.assertRaises(ValueError): self.loop.run_until_complete( self.loop.sock_recv(sock, 1024)) with self.assertRaises(ValueError): self.loop.run_until_complete( self.loop.sock_accept(sock)) # test in non-blocking mode sock.setblocking(False) self.loop.run_until_complete( self.loop.sock_connect(sock, httpd.address)) self.loop.run_until_complete( self.loop.sock_sendall(sock, b'GET / HTTP/1.0\r\n\r\n')) data = self.loop.run_until_complete( self.loop.sock_recv(sock, 1024)) # consume data self.loop.run_until_complete( self.loop.sock_recv(sock, 1024)) sock.close() self.assertTrue(data.startswith(b'HTTP/1.0 200 OK')) def test_sock_client_ops(self): with test_utils.run_test_server() as httpd: sock = socket.socket() self._basetest_sock_client_ops(httpd, sock) @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets') def test_unix_sock_client_ops(self): with test_utils.run_test_unix_server() as httpd: sock = socket.socket(socket.AF_UNIX) self._basetest_sock_client_ops(httpd, sock) def test_sock_client_fail(self): # Make sure that we will get an unused port address = None try: s = socket.socket() s.bind(('127.0.0.1', 0)) address = s.getsockname() finally: s.close() sock = socket.socket() sock.setblocking(False) with self.assertRaises(ConnectionRefusedError): self.loop.run_until_complete( self.loop.sock_connect(sock, address)) sock.close() def test_sock_accept(self): listener = socket.socket() listener.setblocking(False) listener.bind(('127.0.0.1', 0)) listener.listen(1) client = socket.socket() client.connect(listener.getsockname()) f = self.loop.sock_accept(listener) conn, addr = self.loop.run_until_complete(f) self.assertEqual(conn.gettimeout(), 0) self.assertEqual(addr, client.getsockname()) self.assertEqual(client.getpeername(), listener.getsockname()) client.close() conn.close() listener.close() @unittest.skipUnless(hasattr(signal, 'SIGKILL'), 'No SIGKILL') def test_add_signal_handler(self): caught = 0 def my_handler(): nonlocal caught caught += 1 # Check error 
behavior first. self.assertRaises( TypeError, self.loop.add_signal_handler, 'boom', my_handler) self.assertRaises( TypeError, self.loop.remove_signal_handler, 'boom') self.assertRaises( ValueError, self.loop.add_signal_handler, signal.NSIG+1, my_handler) self.assertRaises( ValueError, self.loop.remove_signal_handler, signal.NSIG+1) self.assertRaises( ValueError, self.loop.add_signal_handler, 0, my_handler) self.assertRaises( ValueError, self.loop.remove_signal_handler, 0) self.assertRaises( ValueError, self.loop.add_signal_handler, -1, my_handler) self.assertRaises( ValueError, self.loop.remove_signal_handler, -1) self.assertRaises( RuntimeError, self.loop.add_signal_handler, signal.SIGKILL, my_handler) # Removing SIGKILL doesn't raise, since we don't call signal(). self.assertFalse(self.loop.remove_signal_handler(signal.SIGKILL)) # Now set a handler and handle it. self.loop.add_signal_handler(signal.SIGINT, my_handler) os.kill(os.getpid(), signal.SIGINT) test_utils.run_until(self.loop, lambda: caught) # Removing it should restore the default handler. self.assertTrue(self.loop.remove_signal_handler(signal.SIGINT)) self.assertEqual(signal.getsignal(signal.SIGINT), signal.default_int_handler) # Removing again returns False. self.assertFalse(self.loop.remove_signal_handler(signal.SIGINT)) @unittest.skipUnless(hasattr(signal, 'SIGALRM'), 'No SIGALRM') def test_signal_handling_while_selecting(self): # Test with a signal actually arriving during a select() call. caught = 0 def my_handler(): nonlocal caught caught += 1 self.loop.stop() self.loop.add_signal_handler(signal.SIGALRM, my_handler) signal.setitimer(signal.ITIMER_REAL, 0.01, 0) # Send SIGALRM once. 
self.loop.run_forever() self.assertEqual(caught, 1) @unittest.skipUnless(hasattr(signal, 'SIGALRM'), 'No SIGALRM') def test_signal_handling_args(self): some_args = (42,) caught = 0 def my_handler(*args): nonlocal caught caught += 1 self.assertEqual(args, some_args) self.loop.add_signal_handler(signal.SIGALRM, my_handler, *some_args) signal.setitimer(signal.ITIMER_REAL, 0.1, 0) # Send SIGALRM once. self.loop.call_later(0.5, self.loop.stop) self.loop.run_forever() self.assertEqual(caught, 1) def _basetest_create_connection(self, connection_fut, check_sockname=True): tr, pr = self.loop.run_until_complete(connection_fut) self.assertIsInstance(tr, asyncio.Transport) self.assertIsInstance(pr, asyncio.Protocol) self.assertIs(pr.transport, tr) if check_sockname: self.assertIsNotNone(tr.get_extra_info('sockname')) self.loop.run_until_complete(pr.done) self.assertGreater(pr.nbytes, 0) tr.close() def test_create_connection(self): with test_utils.run_test_server() as httpd: conn_fut = self.loop.create_connection( lambda: MyProto(loop=self.loop), *httpd.address) self._basetest_create_connection(conn_fut) @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets') def test_create_unix_connection(self): # Issue #20682: On Mac OS X Tiger, getsockname() returns a # zero-length address for UNIX socket. 
check_sockname = not osx_tiger() with test_utils.run_test_unix_server() as httpd: conn_fut = self.loop.create_unix_connection( lambda: MyProto(loop=self.loop), httpd.address) self._basetest_create_connection(conn_fut, check_sockname) def test_create_connection_sock(self): with test_utils.run_test_server() as httpd: sock = None infos = self.loop.run_until_complete( self.loop.getaddrinfo( *httpd.address, type=socket.SOCK_STREAM)) for family, type, proto, cname, address in infos: try: sock = socket.socket(family=family, type=type, proto=proto) sock.setblocking(False) self.loop.run_until_complete( self.loop.sock_connect(sock, address)) except: pass else: break else: assert False, 'Can not create socket.' f = self.loop.create_connection( lambda: MyProto(loop=self.loop), sock=sock) tr, pr = self.loop.run_until_complete(f) self.assertIsInstance(tr, asyncio.Transport) self.assertIsInstance(pr, asyncio.Protocol) self.loop.run_until_complete(pr.done) self.assertGreater(pr.nbytes, 0) tr.close() def _basetest_create_ssl_connection(self, connection_fut, check_sockname=True): tr, pr = self.loop.run_until_complete(connection_fut) self.assertIsInstance(tr, asyncio.Transport) self.assertIsInstance(pr, asyncio.Protocol) self.assertTrue('ssl' in tr.__class__.__name__.lower()) if check_sockname: self.assertIsNotNone(tr.get_extra_info('sockname')) self.loop.run_until_complete(pr.done) self.assertGreater(pr.nbytes, 0) tr.close() @unittest.skipIf(ssl is None, 'No ssl module') def test_create_ssl_connection(self): with test_utils.run_test_server(use_ssl=True) as httpd: conn_fut = self.loop.create_connection( lambda: MyProto(loop=self.loop), *httpd.address, ssl=test_utils.dummy_ssl_context()) self._basetest_create_ssl_connection(conn_fut) @unittest.skipIf(ssl is None, 'No ssl module') @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets') def test_create_ssl_unix_connection(self): # Issue #20682: On Mac OS X Tiger, getsockname() returns a # zero-length address for UNIX socket. 
check_sockname = not osx_tiger() with test_utils.run_test_unix_server(use_ssl=True) as httpd: conn_fut = self.loop.create_unix_connection( lambda: MyProto(loop=self.loop), httpd.address, ssl=test_utils.dummy_ssl_context(), server_hostname='127.0.0.1') self._basetest_create_ssl_connection(conn_fut, check_sockname) def test_create_connection_local_addr(self): with test_utils.run_test_server() as httpd: port = support.find_unused_port() f = self.loop.create_connection( lambda: MyProto(loop=self.loop), *httpd.address, local_addr=(httpd.address[0], port)) tr, pr = self.loop.run_until_complete(f) expected = pr.transport.get_extra_info('sockname')[1] self.assertEqual(port, expected) tr.close() def test_create_connection_local_addr_in_use(self): with test_utils.run_test_server() as httpd: f = self.loop.create_connection( lambda: MyProto(loop=self.loop), *httpd.address, local_addr=httpd.address) with self.assertRaises(OSError) as cm: self.loop.run_until_complete(f) self.assertEqual(cm.exception.errno, errno.EADDRINUSE) self.assertIn(str(httpd.address), cm.exception.strerror) def test_create_server(self): proto = MyProto(self.loop) f = self.loop.create_server(lambda: proto, '0.0.0.0', 0) server = self.loop.run_until_complete(f) self.assertEqual(len(server.sockets), 1) sock = server.sockets[0] host, port = sock.getsockname() self.assertEqual(host, '0.0.0.0') client = socket.socket() client.connect(('127.0.0.1', port)) client.sendall(b'xxx') self.loop.run_until_complete(proto.connected) self.assertEqual('CONNECTED', proto.state) test_utils.run_until(self.loop, lambda: proto.nbytes > 0) self.assertEqual(3, proto.nbytes) # extra info is available self.assertIsNotNone(proto.transport.get_extra_info('sockname')) self.assertEqual('127.0.0.1', proto.transport.get_extra_info('peername')[0]) # close connection proto.transport.close() self.loop.run_until_complete(proto.done) self.assertEqual('CLOSED', proto.state) # the client socket must be closed after to avoid ECONNRESET upon # 
recv()/send() on the serving socket client.close() # close server server.close() def _make_unix_server(self, factory, **kwargs): path = test_utils.gen_unix_socket_path() self.addCleanup(lambda: os.path.exists(path) and os.unlink(path)) f = self.loop.create_unix_server(factory, path, **kwargs) server = self.loop.run_until_complete(f) return server, path @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets') def test_create_unix_server(self): proto = MyProto(loop=self.loop) server, path = self._make_unix_server(lambda: proto) self.assertEqual(len(server.sockets), 1) client = socket.socket(socket.AF_UNIX) client.connect(path) client.sendall(b'xxx') self.loop.run_until_complete(proto.connected) self.assertEqual('CONNECTED', proto.state) test_utils.run_until(self.loop, lambda: proto.nbytes > 0) self.assertEqual(3, proto.nbytes) # close connection proto.transport.close() self.loop.run_until_complete(proto.done) self.assertEqual('CLOSED', proto.state) # the client socket must be closed after to avoid ECONNRESET upon # recv()/send() on the serving socket client.close() # close server server.close() @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets') def test_create_unix_server_path_socket_error(self): proto = MyProto(loop=self.loop) sock = socket.socket() with sock: f = self.loop.create_unix_server(lambda: proto, '/test', sock=sock) with self.assertRaisesRegex(ValueError, 'path and sock can not be specified ' 'at the same time'): self.loop.run_until_complete(f) def _create_ssl_context(self, certfile, keyfile=None): sslcontext = ssl.SSLContext(ssl.PROTOCOL_SSLv23) sslcontext.options |= ssl.OP_NO_SSLv2 sslcontext.load_cert_chain(certfile, keyfile) return sslcontext def _make_ssl_server(self, factory, certfile, keyfile=None): sslcontext = self._create_ssl_context(certfile, keyfile) f = self.loop.create_server(factory, '127.0.0.1', 0, ssl=sslcontext) server = self.loop.run_until_complete(f) sock = server.sockets[0] host, port = sock.getsockname() 
self.assertEqual(host, '127.0.0.1') return server, host, port def _make_ssl_unix_server(self, factory, certfile, keyfile=None): sslcontext = self._create_ssl_context(certfile, keyfile) return self._make_unix_server(factory, ssl=sslcontext) @unittest.skipIf(ssl is None, 'No ssl module') def test_create_server_ssl(self): proto = MyProto(loop=self.loop) server, host, port = self._make_ssl_server( lambda: proto, ONLYCERT, ONLYKEY) f_c = self.loop.create_connection(MyBaseProto, host, port, ssl=test_utils.dummy_ssl_context()) client, pr = self.loop.run_until_complete(f_c) client.write(b'xxx') self.loop.run_until_complete(proto.connected) self.assertEqual('CONNECTED', proto.state) test_utils.run_until(self.loop, lambda: proto.nbytes > 0) self.assertEqual(3, proto.nbytes) # extra info is available self.assertIsNotNone(proto.transport.get_extra_info('sockname')) self.assertEqual('127.0.0.1', proto.transport.get_extra_info('peername')[0]) # close connection proto.transport.close() self.loop.run_until_complete(proto.done) self.assertEqual('CLOSED', proto.state) # the client socket must be closed after to avoid ECONNRESET upon # recv()/send() on the serving socket client.close() # stop serving server.close() @unittest.skipIf(ssl is None, 'No ssl module') @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets') def test_create_unix_server_ssl(self): proto = MyProto(loop=self.loop) server, path = self._make_ssl_unix_server( lambda: proto, ONLYCERT, ONLYKEY) f_c = self.loop.create_unix_connection( MyBaseProto, path, ssl=test_utils.dummy_ssl_context(), server_hostname='') client, pr = self.loop.run_until_complete(f_c) client.write(b'xxx') self.loop.run_until_complete(proto.connected) self.assertEqual('CONNECTED', proto.state) test_utils.run_until(self.loop, lambda: proto.nbytes > 0) self.assertEqual(3, proto.nbytes) # close connection proto.transport.close() self.loop.run_until_complete(proto.done) self.assertEqual('CLOSED', proto.state) # the client socket must be 
closed after to avoid ECONNRESET upon # recv()/send() on the serving socket client.close() # stop serving server.close() @unittest.skipIf(ssl is None, 'No ssl module') @unittest.skipUnless(HAS_SNI, 'No SNI support in ssl module') def test_create_server_ssl_verify_failed(self): proto = MyProto(loop=self.loop) server, host, port = self._make_ssl_server( lambda: proto, SIGNED_CERTFILE) sslcontext_client = ssl.SSLContext(ssl.PROTOCOL_SSLv23) sslcontext_client.options |= ssl.OP_NO_SSLv2 sslcontext_client.verify_mode = ssl.CERT_REQUIRED if hasattr(sslcontext_client, 'check_hostname'): sslcontext_client.check_hostname = True # no CA loaded f_c = self.loop.create_connection(MyProto, host, port, ssl=sslcontext_client) with test_utils.disable_logger(): with self.assertRaisesRegex(ssl.SSLError, 'certificate verify failed '): self.loop.run_until_complete(f_c) # close connection self.assertIsNone(proto.transport) server.close() @unittest.skipIf(ssl is None, 'No ssl module') @unittest.skipUnless(HAS_SNI, 'No SNI support in ssl module') @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets') def test_create_unix_server_ssl_verify_failed(self): proto = MyProto(loop=self.loop) server, path = self._make_ssl_unix_server( lambda: proto, SIGNED_CERTFILE) sslcontext_client = ssl.SSLContext(ssl.PROTOCOL_SSLv23) sslcontext_client.options |= ssl.OP_NO_SSLv2 sslcontext_client.verify_mode = ssl.CERT_REQUIRED if hasattr(sslcontext_client, 'check_hostname'): sslcontext_client.check_hostname = True # no CA loaded f_c = self.loop.create_unix_connection(MyProto, path, ssl=sslcontext_client, server_hostname='invalid') with test_utils.disable_logger(): with self.assertRaisesRegex(ssl.SSLError, 'certificate verify failed '): self.loop.run_until_complete(f_c) # close connection self.assertIsNone(proto.transport) server.close() @unittest.skipIf(ssl is None, 'No ssl module') @unittest.skipUnless(HAS_SNI, 'No SNI support in ssl module') def test_create_server_ssl_match_failed(self): proto = 
MyProto(loop=self.loop) server, host, port = self._make_ssl_server( lambda: proto, SIGNED_CERTFILE) sslcontext_client = ssl.SSLContext(ssl.PROTOCOL_SSLv23) sslcontext_client.options |= ssl.OP_NO_SSLv2 sslcontext_client.verify_mode = ssl.CERT_REQUIRED sslcontext_client.load_verify_locations( cafile=SIGNING_CA) if hasattr(sslcontext_client, 'check_hostname'): sslcontext_client.check_hostname = True # incorrect server_hostname f_c = self.loop.create_connection(MyProto, host, port, ssl=sslcontext_client) with test_utils.disable_logger(): with self.assertRaisesRegex( ssl.CertificateError, "hostname '127.0.0.1' doesn't match 'localhost'"): self.loop.run_until_complete(f_c) # close connection proto.transport.close() server.close() @unittest.skipIf(ssl is None, 'No ssl module') @unittest.skipUnless(HAS_SNI, 'No SNI support in ssl module') @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets') def test_create_unix_server_ssl_verified(self): proto = MyProto(loop=self.loop) server, path = self._make_ssl_unix_server( lambda: proto, SIGNED_CERTFILE) sslcontext_client = ssl.SSLContext(ssl.PROTOCOL_SSLv23) sslcontext_client.options |= ssl.OP_NO_SSLv2 sslcontext_client.verify_mode = ssl.CERT_REQUIRED sslcontext_client.load_verify_locations(cafile=SIGNING_CA) if hasattr(sslcontext_client, 'check_hostname'): sslcontext_client.check_hostname = True # Connection succeeds with correct CA and server hostname. 
f_c = self.loop.create_unix_connection(MyProto, path, ssl=sslcontext_client, server_hostname='localhost') client, pr = self.loop.run_until_complete(f_c) # close connection proto.transport.close() client.close() server.close() @unittest.skipIf(ssl is None, 'No ssl module') @unittest.skipUnless(HAS_SNI, 'No SNI support in ssl module') def test_create_server_ssl_verified(self): proto = MyProto(loop=self.loop) server, host, port = self._make_ssl_server( lambda: proto, SIGNED_CERTFILE) sslcontext_client = ssl.SSLContext(ssl.PROTOCOL_SSLv23) sslcontext_client.options |= ssl.OP_NO_SSLv2 sslcontext_client.verify_mode = ssl.CERT_REQUIRED sslcontext_client.load_verify_locations(cafile=SIGNING_CA) if hasattr(sslcontext_client, 'check_hostname'): sslcontext_client.check_hostname = True # Connection succeeds with correct CA and server hostname. f_c = self.loop.create_connection(MyProto, host, port, ssl=sslcontext_client, server_hostname='localhost') client, pr = self.loop.run_until_complete(f_c) # close connection proto.transport.close() client.close() server.close() def test_create_server_sock(self): proto = asyncio.Future(loop=self.loop) class TestMyProto(MyProto): def connection_made(self, transport): super().connection_made(transport) proto.set_result(self) sock_ob = socket.socket(type=socket.SOCK_STREAM) sock_ob.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) sock_ob.bind(('0.0.0.0', 0)) f = self.loop.create_server(TestMyProto, sock=sock_ob) server = self.loop.run_until_complete(f) sock = server.sockets[0] self.assertIs(sock, sock_ob) host, port = sock.getsockname() self.assertEqual(host, '0.0.0.0') client = socket.socket() client.connect(('127.0.0.1', port)) client.send(b'xxx') client.close() server.close() def test_create_server_addr_in_use(self): sock_ob = socket.socket(type=socket.SOCK_STREAM) sock_ob.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) sock_ob.bind(('0.0.0.0', 0)) f = self.loop.create_server(MyProto, sock=sock_ob) server = 
self.loop.run_until_complete(f) sock = server.sockets[0] host, port = sock.getsockname() f = self.loop.create_server(MyProto, host=host, port=port) with self.assertRaises(OSError) as cm: self.loop.run_until_complete(f) self.assertEqual(cm.exception.errno, errno.EADDRINUSE) server.close() @unittest.skipUnless(support.IPV6_ENABLED, 'IPv6 not supported or enabled') def test_create_server_dual_stack(self): f_proto = asyncio.Future(loop=self.loop) class TestMyProto(MyProto): def connection_made(self, transport): super().connection_made(transport) f_proto.set_result(self) try_count = 0 while True: try: port = support.find_unused_port() f = self.loop.create_server(TestMyProto, host=None, port=port) server = self.loop.run_until_complete(f) except OSError as ex: if ex.errno == errno.EADDRINUSE: try_count += 1 self.assertGreaterEqual(5, try_count) continue else: raise else: break client = socket.socket() client.connect(('127.0.0.1', port)) client.send(b'xxx') proto = self.loop.run_until_complete(f_proto) proto.transport.close() client.close() f_proto = asyncio.Future(loop=self.loop) client = socket.socket(socket.AF_INET6) client.connect(('::1', port)) client.send(b'xxx') proto = self.loop.run_until_complete(f_proto) proto.transport.close() client.close() server.close() def test_server_close(self): f = self.loop.create_server(MyProto, '0.0.0.0', 0) server = self.loop.run_until_complete(f) sock = server.sockets[0] host, port = sock.getsockname() client = socket.socket() client.connect(('127.0.0.1', port)) client.send(b'xxx') client.close() server.close() client = socket.socket() self.assertRaises( ConnectionRefusedError, client.connect, ('127.0.0.1', port)) client.close() def test_create_datagram_endpoint(self): class TestMyDatagramProto(MyDatagramProto): def __init__(inner_self): super().__init__(loop=self.loop) def datagram_received(self, data, addr): super().datagram_received(data, addr) self.transport.sendto(b'resp:'+data, addr) coro = self.loop.create_datagram_endpoint( 
TestMyDatagramProto, local_addr=('127.0.0.1', 0)) s_transport, server = self.loop.run_until_complete(coro) host, port = s_transport.get_extra_info('sockname') self.assertIsInstance(s_transport, asyncio.Transport) self.assertIsInstance(server, TestMyDatagramProto) self.assertEqual('INITIALIZED', server.state) self.assertIs(server.transport, s_transport) coro = self.loop.create_datagram_endpoint( lambda: MyDatagramProto(loop=self.loop), remote_addr=(host, port)) transport, client = self.loop.run_until_complete(coro) self.assertIsInstance(transport, asyncio.Transport) self.assertIsInstance(client, MyDatagramProto) self.assertEqual('INITIALIZED', client.state) self.assertIs(client.transport, transport) transport.sendto(b'xxx') test_utils.run_until(self.loop, lambda: server.nbytes) self.assertEqual(3, server.nbytes) test_utils.run_until(self.loop, lambda: client.nbytes) # received self.assertEqual(8, client.nbytes) # extra info is available self.assertIsNotNone(transport.get_extra_info('sockname')) # close connection transport.close() self.loop.run_until_complete(client.done) self.assertEqual('CLOSED', client.state) server.transport.close() def test_internal_fds(self): loop = self.create_event_loop() if not isinstance(loop, selector_events.BaseSelectorEventLoop): loop.close() self.skipTest('loop is not a BaseSelectorEventLoop') self.assertEqual(1, loop._internal_fds) loop.close() self.assertEqual(0, loop._internal_fds) self.assertIsNone(loop._csock) self.assertIsNone(loop._ssock) @unittest.skipUnless(sys.platform != 'win32', "Don't support pipes for Windows") def test_read_pipe(self): proto = MyReadPipeProto(loop=self.loop) rpipe, wpipe = os.pipe() pipeobj = io.open(rpipe, 'rb', 1024) @asyncio.coroutine def connect(): t, p = yield from self.loop.connect_read_pipe( lambda: proto, pipeobj) self.assertIs(p, proto) self.assertIs(t, proto.transport) self.assertEqual(['INITIAL', 'CONNECTED'], proto.state) self.assertEqual(0, proto.nbytes) 
self.loop.run_until_complete(connect()) os.write(wpipe, b'1') test_utils.run_until(self.loop, lambda: proto.nbytes >= 1) self.assertEqual(1, proto.nbytes) os.write(wpipe, b'2345') test_utils.run_until(self.loop, lambda: proto.nbytes >= 5) self.assertEqual(['INITIAL', 'CONNECTED'], proto.state) self.assertEqual(5, proto.nbytes) os.close(wpipe) self.loop.run_until_complete(proto.done) self.assertEqual( ['INITIAL', 'CONNECTED', 'EOF', 'CLOSED'], proto.state) # extra info is available self.assertIsNotNone(proto.transport.get_extra_info('pipe')) @unittest.skipUnless(sys.platform != 'win32', "Don't support pipes for Windows") # select, poll and kqueue don't support character devices (PTY) on Mac OS X # older than 10.6 (Snow Leopard) @support.requires_mac_ver(10, 6) # Issue #20495: The test hangs on FreeBSD 7.2 but pass on FreeBSD 9 @support.requires_freebsd_version(8) def test_read_pty_output(self): proto = MyReadPipeProto(loop=self.loop) master, slave = os.openpty() master_read_obj = io.open(master, 'rb', 0)