From: <am...@us...> - 2007-05-15 20:05:45
|
Revision: 3227 http://svn.sourceforge.net/jython/?rev=3227&view=rev Author: amak Date: 2007-05-15 13:05:43 -0700 (Tue, 15 May 2007) Log Message: ----------- AMAK: Checking in 1. An updated socket module, based on java.nio, with SSL and non-blocking support. 2. A select module based on java.nio 3. A socket test module which has been ported from cpython 2.4 4. A select test module which has been ported from cpython 2.4 5. A new select test module with wider coverage. Added Paths: ----------- trunk/sandbox/kennedya/ trunk/sandbox/kennedya/asynch_sockets/ trunk/sandbox/kennedya/asynch_sockets/select.py trunk/sandbox/kennedya/asynch_sockets/socket.py trunk/sandbox/kennedya/asynch_sockets/test/ trunk/sandbox/kennedya/asynch_sockets/test/test_select.py trunk/sandbox/kennedya/asynch_sockets/test/test_select_new.py trunk/sandbox/kennedya/asynch_sockets/test/test_socket.py Added: trunk/sandbox/kennedya/asynch_sockets/select.py =================================================================== --- trunk/sandbox/kennedya/asynch_sockets/select.py (rev 0) +++ trunk/sandbox/kennedya/asynch_sockets/select.py 2007-05-15 20:05:43 UTC (rev 3227) @@ -0,0 +1,125 @@ +""" +AMAK: 20070515: New select implementation that uses java.nio +""" + +import java.nio.channels +from java.nio.channels.SelectionKey import OP_ACCEPT, OP_CONNECT, OP_WRITE, OP_READ + +class error(Exception): pass + +POLLIN = 1 +POLLOUT = 2 +POLLPRI = 4 + +class poll: + + def __init__(self): + self.selector = java.nio.channels.Selector.open() + self.chanmap = {} + + def _getselectable(self, userobject): + if isinstance(userobject, java.nio.channels.SelectableChannel): + return userobject + else: + if hasattr(userobject, 'fileno') and callable(getattr(userobject, 'fileno')): + result = getattr(userobject, 'fileno')() + if isinstance(result, java.nio.channels.SelectableChannel): + return result + raise error("Object '%s' is not a watchable channel" % userobject, 10038) + + def register(self, userobject, mask): + channel = 
self._getselectable(userobject) + jmask = 0 + if mask & POLLIN: + # Note that OP_READ is NOT a valid event on server socket channels. + if channel.validOps() & OP_ACCEPT: + jmask = OP_ACCEPT + else: + jmask = OP_READ + if mask & POLLOUT: + jmask |= OP_WRITE + if channel.validOps() & OP_CONNECT: + jmask |= OP_CONNECT + selectionkey = channel.register(self.selector, jmask) + self.chanmap[channel] = (userobject, selectionkey) + + def unregister(self, userobject): + channel = self._getselectable(userobject) + self.chanmap[channel][1].cancel() + del self.chanmap[channel] + + def _dopoll(self, timeout=None): + if timeout is None or timeout < 0: + self.selector.select() + elif timeout == 0: + self.selector.selectNow() + else: + # No multiplication required: both cpython and java use millisecond timeouts + self.selector.select(timeout) + # The returned selectedKeys cannot be used from multiple threads! + return self.selector.selectedKeys() + + def poll(self, timeout=None): + selectedkeys = self._dopoll(timeout) + results = [] + for k in selectedkeys.iterator(): + jmask = k.readyOps() + pymask = 0 + if jmask & OP_READ: pymask |= POLLIN + if jmask & OP_WRITE: pymask |= POLLOUT + if jmask & OP_ACCEPT: pymask |= POLLIN + if jmask & OP_CONNECT: pymask |= POLLOUT + # Now return the original userobject, and the return event mask + # A python 2.2 generator would be sweet here + results.append( (self.chanmap[k.channel()][0], pymask) ) + return results + + def close(self): + for k in self.selector.keys(): + k.cancel() + self.selector.close() + +def _calcselecttimeoutvalue(value): + if value is None: + return None + try: + floatvalue = float(value) + except Exception, x: + raise TypeError("Select timeout value must be a number or None") + if value < 0: + raise error("Select timeout value cannot be negative", 10022) + if floatvalue < 0.000001: + return 0 + return int(floatvalue * 1000) # Convert to milliseconds + +def select ( read_fd_list, write_fd_list, outofband_fd_list, 
timeout=None): + timeout = _calcselecttimeoutvalue(timeout) + # First create a poll object to do the actual watching. + pobj = poll() + # Check the read list + try: + # AMAK: Need to remove all this list searching, change to a dictionary? + for fd in read_fd_list: + mask = POLLIN + if fd in write_fd_list: + mask |= POLLOUT + pobj.register(fd, mask) + # And now the write list + for fd in write_fd_list: + if not fd in read_fd_list: # fds in both have already been registered. + pobj.register(fd, POLLOUT) + results = pobj.poll(timeout) + except AttributeError, ax: + if str(ax) == "__getitem__": + raise TypeError(ax) + raise ax + # Now start preparing the results + read_ready_list, write_ready_list, oob_ready_list = [], [], [] + for fd, mask in results: + if mask & POLLIN: + read_ready_list.append(fd) + if mask & POLLOUT: + write_ready_list.append(fd) + pobj.close() + return read_ready_list, write_ready_list, oob_ready_list + Added: trunk/sandbox/kennedya/asynch_sockets/socket.py =================================================================== --- trunk/sandbox/kennedya/asynch_sockets/socket.py (rev 0) +++ trunk/sandbox/kennedya/asynch_sockets/socket.py 2007-05-15 20:05:43 UTC (rev 3227) @@ -0,0 +1,778 @@ +""" +This is an updated socket module for use on JVMs > 1.4; it is derived from the +old jython socket module. +The primary extra it provides is non-blocking support. + +XXX Restrictions: + +- Only INET sockets +- No asynchronous behavior +- No socket options +- Can't do a very good gethostbyaddr() right... 
+AMAK: 20050527: added socket timeouts +AMAK: 20070515: Added non-blocking (asynchronous) support +AMAK: 20070515: Added client-side SSL support +""" + +_defaulttimeout = None + +import threading +import time +import types + +import java.nio +import java.net +import org.python.core +import jarray +import string +import sys + +from java.lang import InterruptedException + +class error(Exception): pass +class herror(error): pass +class gaierror(error): pass +class timeout(error): pass + +ALL = None + +exception_map = { + +# (<javaexception>, <circumstance>) : lambda: <code that raises the python equivalent> + +(java.net.BindException, ALL) : lambda exc: error('TODO: find python errno and string'), +(java.io.InterruptedIOException, ALL) : lambda exc: timeout('timed out'), +(java.net.SocketTimeoutException, ALL) : lambda exc: timeout('timed out'), +} + +def would_block_error(exc=None): + return error( (10035, 'The socket operation could not complete without blocking') ) + +def map_exception(exc, circumstance=ALL): + try: +# print "Mapping exception: %s" % str(exc) + return exception_map[(exc.__class__, circumstance)](exc) + except KeyError: + return error('Unmapped java exception: %s' % exc.toString()) + +exception_map.update({ + (java.nio.channels.IllegalBlockingModeException, ALL) : would_block_error, + }) + +MODE_BLOCKING = 'block' +MODE_NONBLOCKING = 'nonblock' +MODE_TIMEOUT = 'timeout' + +_permitted_modes = (MODE_BLOCKING, MODE_NONBLOCKING, MODE_TIMEOUT) + +class _nio_impl: + + timeout = None + mode = MODE_BLOCKING + + def read(self, buf): + bytebuf = java.nio.ByteBuffer.wrap(buf) + count = self.jchannel.read(bytebuf) + return count + + def write(self, buf): + bytebuf = java.nio.ByteBuffer.wrap(buf) + count = self.jchannel.write(bytebuf) + return count + + def _setreuseaddress(self, flag): + self.jsocket.setReuseAddress(flag) + + def _getreuseaddress(self, flag): + return self.jsocket.getReuseAddress() + + def getpeername(self): + return 
(self.jsocket.getInetAddress().getHostName(), self.jsocket.getPort() ) + + def config(self, mode, timeout): + self.mode = mode + if self.mode == MODE_BLOCKING: + self.jchannel.configureBlocking(1) + if self.mode == MODE_NONBLOCKING: + self.jchannel.configureBlocking(0) + if self.mode == MODE_TIMEOUT: + # self.channel.configureBlocking(0) + self.jsocket.setSoTimeout(int(timeout*1000)) + + def close1(self): + self.jsocket.close() + + def close2(self): + self.jchannel.close() + + def close3(self): + if not self.jsocket.isClosed(): + self.jsocket.close() + + def close4(self): + if not self.jsocket.isClosed(): + if hasattr(self.jsocket, 'shutdownInput') and not self.jsocket.isInputShutdown(): + self.jsocket.shutdownInput() + if hasattr(self.jsocket, 'shutdownOutput') and not self.jsocket.isOutputShutdown(): + self.jsocket.shutdownOutput() + self.jsocket.close() + + close = close1 +# close = close2 +# close = close3 +# close = close4 + + def getchannel(self): + return self.jchannel + + fileno = getchannel + +class _client_socket_impl(_nio_impl): + + def __init__(self, socket=None): + if socket: + self.jchannel = socket.getChannel() + self.host = socket.getInetAddress().getHostName() + self.port = socket.getPort() + else: + self.jchannel = java.nio.channels.SocketChannel.open() + self.host = None + self.port = None + self.jsocket = self.jchannel.socket() + + def connect(self, host, port): + self.host = host + self.port = port + self.jchannel.connect(java.net.InetSocketAddress(self.host, self.port)) + + def finish_connect(self): + return self.jchannel.finishConnect() + + def close(self): + _nio_impl.close(self) + +class _server_socket_impl(_nio_impl): + + def __init__(self, host, port, backlog, reuse_addr): + self.jchannel = java.nio.channels.ServerSocketChannel.open() + self.jsocket = self.jchannel.socket() + if host: + bindaddr = java.net.InetSocketAddress(host, port) + else: + bindaddr = java.net.InetSocketAddress(port) + self._setreuseaddress(reuse_addr) + 
self.jsocket.bind(bindaddr, backlog) + + def accept(self): + try: + if self.mode in (MODE_BLOCKING, MODE_NONBLOCKING): + new_cli_chan = self.jchannel.accept() + if new_cli_chan != None: + return _client_socket_impl(new_cli_chan.socket()) + else: + return None + else: + # In timeout mode now + new_cli_sock = self.jsocket.accept() + return _client_socket_impl(new_cli_sock) + except java.lang.Exception, jlx: + raise map_exception(jlx) + + def close(self): + _nio_impl.close(self) + +class _datagram_socket_impl(_nio_impl): + + def __init__(self, port=None, address=None, reuse_addr=0): + self.jchannel = java.nio.channels.DatagramChannel.open() + self.jsocket = self.jchannel.socket() + if port: + if address is not None: + bind_address = java.net.InetSocketAddress(address, port) + else: + bind_address = java.net.InetSocketAddress(port) + self.jsocket.bind(bind_address) + self._setreuseaddress(reuse_addr) + + def connect(self, host, port): + self.jchannel.connect(java.net.InetSocketAddress(host, port)) + + def finish_connect(self): + return self.jchannel.finishConnect() + + def receive(self, packet): + self.jsocket.receive(packet) + + def send(self, packet): + self.jsocket.send(packet) + +__all__ = [ 'AF_INET', 'SO_REUSEADDR', 'SOCK_DGRAM', 'SOCK_RAW', + 'SOCK_RDM', 'SOCK_SEQPACKET', 'SOCK_STREAM', 'SOL_SOCKET', + 'SocketType', 'error', 'herror', 'gaierror', 'timeout', + 'getfqdn', 'gethostbyaddr', 'gethostbyname', 'gethostname', + 'socket', 'getaddrinfo', 'getdefaulttimeout', 'setdefaulttimeout', + 'has_ipv6', 'htons', 'htonl', 'ntohs', 'ntohl', + ] + +AF_INET = 2 + +SOCK_DGRAM = 1 +SOCK_STREAM = 2 +SOCK_RAW = 3 # not supported +SOCK_RDM = 4 # not supported +SOCK_SEQPACKET = 5 # not supported +SOL_SOCKET = 0xFFFF +SO_REUSEADDR = 4 + +def _gethostbyaddr(name): + # This is as close as I can get; at least the types are correct... 
+ addresses = java.net.InetAddress.getAllByName(gethostbyname(name)) + names = [] + addrs = [] + for addr in addresses: + names.append(addr.getHostName()) + addrs.append(addr.getHostAddress()) + return (names, addrs) + +def getfqdn(name=None): + """ + Return a fully qualified domain name for name. If name is omitted or empty + it is interpreted as the local host. To find the fully qualified name, + the hostname returned by gethostbyaddr() is checked, then aliases for the + host, if available. The first name which includes a period is selected. + In case no fully qualified domain name is available, the hostname is retur + New in version 2.0. + """ + if not name: + name = gethostname() + names, addrs = _gethostbyaddr(name) + for a in names: + if a.find(".") >= 0: + return a + return name + +def gethostname(): + return java.net.InetAddress.getLocalHost().getHostName() + +def gethostbyname(name): + return java.net.InetAddress.getByName(name).getHostAddress() + +def gethostbyaddr(name): + names, addrs = _gethostbyaddr(name) + return (names[0], names, addrs) + +def getservbyname(servicename, protocolname=None): + # http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4071389 + # How complex is the structure of /etc/services? 
+ raise NotImplementedError("getservbyname not yet supported on jython.") + +def getservbyport(port, protocolname=None): + # Same situation as above + raise NotImplementedError("getservbyport not yet supported on jython.") + +def getprotobyname(protocolname=None): + # Same situation as above + raise NotImplementedError("getprotobyname not yet supported on jython.") + +def socket(family = AF_INET, type = SOCK_STREAM, flags=0): + assert family == AF_INET + assert type in (SOCK_DGRAM, SOCK_STREAM) + assert flags == 0 + if type == SOCK_STREAM: + return _tcpsocket() + else: + return _udpsocket() + +def getaddrinfo(host, port, family=0, socktype=SOCK_STREAM, proto=0, flags=0): + return ( (AF_INET, socktype, 0, "", (gethostbyname(host), port)), ) + +has_ipv6 = 1 + +def getnameinfo(sock_addr, flags): + raise NotImplementedError("getnameinfo not yet supported on jython.") + +def getdefaulttimeout(): + return _defaulttimeout + +def _calctimeoutvalue(value): + if value is None: + return None + try: + floatvalue = float(value) + except: + raise TypeError('Socket timeout value must be a number or None') + if floatvalue < 0: + raise ValueError("Socket timeout value cannot be negative") + if floatvalue < 0.000001: + return 0.0 + return floatvalue + +def setdefaulttimeout(timeout): + global _defaulttimeout + try: + _defaulttimeout = _calctimeoutvalue(timeout) + finally: + _nonblocking_api_mixin.timeout = _defaulttimeout + +def htons(x): return x +def htonl(x): return x +def ntohs(x): return x +def ntohl(x): return x + +class _nonblocking_api_mixin: + + timeout = _defaulttimeout + mode = MODE_BLOCKING + + def gettimeout(self): + return self.timeout + + def settimeout(self, timeout): + self.timeout = _calctimeoutvalue(timeout) + if self.timeout is None: + self.mode = MODE_BLOCKING + elif self.timeout < 0.000001: + self.mode = MODE_NONBLOCKING + else: + self.mode = MODE_TIMEOUT + self._config() + + def setblocking(self, flag): + if flag: + self.mode = MODE_BLOCKING + self.timeout = 
None + else: + self.mode = MODE_NONBLOCKING + self.timeout = 0.0 + self._config() + + def _config(self): + assert self.mode in _permitted_modes + if self.sock_impl: self.sock_impl.config(self.mode, self.timeout) + + def getchannel(self): + if not self.sock_impl: + raise error("No channel for indeterminate socket") + if hasattr(self.sock_impl, 'getchannel'): + return self.sock_impl.getchannel() + raise error('Operation not implemented on this JVM') + + fileno = getchannel + + def _get_jsocket(self): + return self.sock_impl.jsocket + +def _unpack_address_tuple(address_tuple): + error_message = "Address must be a tuple of (hostname, port)" + if type(address_tuple) is not type( () ) \ + or type(address_tuple[0]) is not type("") \ + or type(address_tuple[1]) is not type(0): + raise TypeError(error_message) + return address_tuple[0], address_tuple[1] + +class _tcpsocket(_nonblocking_api_mixin): + + sock_impl = None + istream = None + ostream = None + addr = None + server = 0 + file_count = 0 + #reuse_addr = 1 + reuse_addr = 0 + + def bind(self, addr): + assert not self.sock_impl + assert not self.addr + # Do the address format check + host, port = _unpack_address_tuple(addr) + self.addr = addr + + def listen(self, backlog=50): + "This signifies a server socket" + try: + assert not self.sock_impl + self.server = 1 + if self.addr: + host, port = self.addr + else: + host, port = "", 0 + self.sock_impl = _server_socket_impl(host, port, backlog, self.reuse_addr) + self._config() + except java.lang.Exception, jlx: + raise + raise map_exception(jlx) + +# +# The following has information on a java.lang.NullPointerException problem I'm having +# +# http://developer.java.sun.com/developer/bugParade/bugs/4801882.html + + def accept(self): + "This signifies a server socket" + try: + if not self.sock_impl: + self.listen() + assert self.server + new_sock = self.sock_impl.accept() + if not new_sock: + raise would_block_error() + cliconn = _tcpsocket() + cliconn._setup(new_sock) + 
return cliconn, new_sock.getpeername() + except java.lang.Exception, jlx: + raise map_exception(jlx) + + def _get_host_port(self, addr): + host, port = _unpack_address_tuple(addr) + if host == "": + host = java.net.InetAddress.getLocalHost() + return host, port + + def connect(self, addr): + "This signifies a client socket" + assert not self.sock_impl + host, port = self._get_host_port(addr) + self.sock_impl = _client_socket_impl() + self._config() # Configure timeouts, etc, now that the socket exists + self.sock_impl.connect(host, port) + self._setup(self.sock_impl) + + def connect_ex(self, addr): + "This signifies a client socket" + assert not self.sock_impl + host, port = self._get_host_port(addr) + self.sock_impl = _client_socket_impl() + self._config() # Configure timeouts, etc, now that the socket exists + self.sock_impl.connect(host, port) + if self.sock_impl.finish_connect(): + self._setup(self.sock_impl) + return 0 + return 1 + + def _setup(self, sock): + self.sock_impl = sock + self.sock_impl._setreuseaddress(self.reuse_addr) + if self.mode != MODE_NONBLOCKING: + self.istream = self.sock_impl.jsocket.getInputStream() + self.ostream = self.sock_impl.jsocket.getOutputStream() + + def recv(self, n): + try: + if not self.sock_impl: raise error('Socket not open') + if self.sock_impl.jchannel.isConnectionPending(): + self.sock_impl.jchannel.finishConnect() + data = jarray.zeros(n, 'b') + m = self.sock_impl.read(data) + if m <= 0: + if self.mode == MODE_NONBLOCKING: + raise would_block_error() + return "" + if m < n: + data = data[:m] + return data.tostring() + except java.lang.Exception, jlx: + raise map_exception(jlx) + + def recvfrom(self, n): + return self.recv(n), None + + def send(self, s): + if not self.sock_impl: raise error('Socket not open') + if self.sock_impl.jchannel.isConnectionPending(): + self.sock_impl.jchannel.finishConnect() + #n = len(s) + numwritten = self.sock_impl.write(s) + return numwritten + + sendall = send + + def getsockname(self): + 
if not self.sock_impl: + host, port = self.addr or ("", 0) + host = java.net.InetAddress.getByName(host).getHostAddress() + else: + if self.server: + host = self.sock_impl.jsocket.getInetAddress().getHostAddress() + else: + host = self.sock_impl.jsocket.getLocalAddress().getHostAddress() + port = self.sock_impl.jsocket.getLocalPort() + return (host, port) + + def getpeername(self): + assert self.sock_impl + assert not self.server + host = self.sock_impl.jsocket.getInetAddress().getHostAddress() + port = self.sock_impl.jsocket.getPort() + return (host, port) + + def setsockopt(self, level, optname, value): + if optname == SO_REUSEADDR: + self.reuse_addr = value + + def getsockopt(self, level, optname): + if optname == SO_REUSEADDR: + return self.reuse_addr + + def makefile(self, mode="r", bufsize=-1): + file = None + if self.istream: + if self.ostream: + file = org.python.core.PyFile(self.istream, self.ostream, + "<socket>", mode) + else: + file = org.python.core.PyFile(self.istream, "<socket>", mode) + elif self.ostream: + file = org.python.core.PyFile(self.ostream, "<socket>", mode) + else: + raise IOError, "both istream and ostream have been shut down" + if file: + return _tcpsocket.FileWrapper(self, file) + + class FileWrapper: + def __init__(self, socket, file): + self.socket = socket + self.sock = socket.sock_impl + self.istream = socket.istream + self.ostream = socket.ostream + + self.file = file + self.read = file.read + self.readline = file.readline + self.readlines = file.readlines + self.write = file.write + self.writelines = file.writelines + self.flush = file.flush + self.seek = file.seek + self.tell = file.tell + self.closed = file.closed + + self.socket.file_count += 1 + + def close(self): + if self.file.closed: + # Already closed + return + + self.socket.file_count -= 1 + self.file.close() + self.closed = self.file.closed + + if self.socket.file_count == 0 and self.socket.sock_impl == 0: + # This is the last file Only close the socket and streams + # 
if there are no outstanding files left. + if self.sock: + self.sock.close() + if self.istream: + self.istream.close() + if self.ostream: + self.ostream.close() + + def shutdown(self, how): + assert how in (0, 1, 2) + assert self.sock_impl + if how in (0, 2): + self.istream = None + if how in (1, 2): + self.ostream = None + + def close(self): + if not self.sock_impl: + return + sock = self.sock_impl + istream = self.istream + ostream = self.ostream + self.sock_impl = 0 + self.istream = 0 + self.ostream = 0 + # Only close the socket and streams if there are no + # outstanding files left. + if self.file_count == 0: + if istream: + istream.close() + if ostream: + ostream.close() + if sock: + sock.close() + +class _udpsocket(_nonblocking_api_mixin): + + def __init__(self): + self.sock_impl = None + self.addr = None + self.reuse_addr = 0 + + def bind(self, addr): + assert not self.sock_impl + host, port = _unpack_address_tuple(addr) + host_address = java.net.InetAddress.getByName(host) + self.sock_impl = _datagram_socket_impl(port, host_address, reuse_addr = self.reuse_addr) + self._config() + + def connect(self, addr): + host, port = _unpack_address_tuple(addr) + assert not self.addr + if not self.sock_impl: + self.sock_impl = _datagram_socket_impl() + self._config() + self.sock_impl.connect(host, port) + self.addr = addr # convert host to InetAddress instance? 
+ + def connect_ex(self, addr): + host, port = _unpack_address_tuple(addr) + assert not self.addr + self.addr = addr + if not self.sock_impl: + self.sock_impl = _datagram_socket_impl() + self._config() + self.sock_impl.connect(host, port) + if self.sock_impl.finish_connect(): + return 0 + return 1 + + def sendto(self, data, p1, p2=None): + if not p2: + flags, addr = 0, p1 + else: + flags, addr = 0, p2 + n = len(data) + if not self.sock_impl: + self.sock_impl = _datagram_socket_impl() + host, port = addr + bytes = jarray.array(map(ord, data), 'b') + a = java.net.InetAddress.getByName(host) + packet = java.net.DatagramPacket(bytes, n, a, port) + self.sock_impl.send(packet) + return n + + def send(self, data): + assert self.addr + return self.sendto(data, self.addr) + + def recvfrom(self, n): + try: + assert self.sock_impl + bytes = jarray.zeros(n, 'b') + packet = java.net.DatagramPacket(bytes, n) + self.sock_impl.receive(packet) + host = None + if packet.getAddress(): + host = packet.getAddress().getHostName() + port = packet.getPort() + m = packet.getLength() + if m < n: + bytes = bytes[:m] + return bytes.tostring(), (host, port) + except java.lang.Exception, jlx: + raise map_exception(jlx) + + def recv(self, n): + try: + assert self.sock_impl + bytes = jarray.zeros(n, 'b') + packet = java.net.DatagramPacket(bytes, n) + self.sock_impl.receive(packet) + m = packet.getLength() + if m < n: + bytes = bytes[:m] + return bytes.tostring() + except java.lang.Exception, jlx: + raise map_exception(jlx) + + def getsockname(self): + assert self.sock_impl + host = self.sock_impl.jsocket.getLocalAddress().getHostName() + port = self.sock_impl.jsocket.getLocalPort() + return (host, port) + + def getpeername(self): + assert self.sock + host = self.sock_impl.jsocket.getInetAddress().getHostName() + port = self.sock_impl.jsocket.getPort() + return (host, port) + + def __del__(self): + self.close() + + def close(self): + if not self.sock_impl: + return + sock = self.sock_impl + 
self.sock_impl = 0 + sock.close() + + def setsockopt(self, level, optname, value): + if optname == SO_REUSEADDR: + self.reuse_addr = value +# self.sock._setreuseaddress(value) + + def getsockopt(self, level, optname): + if optname == SO_REUSEADDR: + return self.sock_impl._getreuseaddress() + else: + return None + +SocketType = _tcpsocket + +# Define the SSL support + +from javax.net.ssl import SSLSocketFactory +from java.io import BufferedInputStream +from java.io import BufferedOutputStream + +class ssl: + + def __init__(self, plain_sock, keyfile=None, certfile=None): + self.ssl_sock = self.make_ssl_socket(plain_sock) + + def make_ssl_socket(self, plain_socket, auto_close=0): + java_net_socket = plain_socket._get_jsocket() + assert isinstance(java_net_socket, java.net.Socket) + host = java_net_socket.getInetAddress().getHostName() + port = java_net_socket.getPort() + factory = SSLSocketFactory.getDefault(); + ssl_socket = factory.createSocket(java_net_socket, host, port, auto_close) + ssl_socket.setEnabledCipherSuites(ssl_socket.getSupportedCipherSuites()) + ssl_socket.startHandshake() + return ssl_socket + + def read(self, n=4096): + # Probably needs some work on efficency + in_buf = BufferedInputStream(self.ssl_sock.getInputStream()) + data = jarray.zeros(n, 'b') + m = in_buf.read(data, 0, n) + if m <= 0: + return "" + if m < n: + data = data[:m] + return data.tostring() + + def write(self, s): + # Probably needs some work on efficency + out = BufferedOutputStream(self.ssl_sock.getOutputStream()) + out.write(s) + out.flush() + + def _get_server_cert(self): + return self.ssl_sock.getSession().getPeerCertificates()[0] + + def server(self): + cert = self._get_server_cert() + return cert.getSubjectDN().toString() + + def issuer(self): + cert = self._get_server_cert() + return cert.getIssuerDN().toString() + +def test(): + s = socket(AF_INET, SOCK_STREAM) + s.connect(("", 80)) + s.send("GET / HTTP/1.0\r\n\r\n") + while 1: + data = s.recv(2000) + print data + if not 
data: + break + +if __name__ == '__main__': + test() Added: trunk/sandbox/kennedya/asynch_sockets/test/test_select.py =================================================================== --- trunk/sandbox/kennedya/asynch_sockets/test/test_select.py (rev 0) +++ trunk/sandbox/kennedya/asynch_sockets/test/test_select.py 2007-05-15 20:05:43 UTC (rev 3227) @@ -0,0 +1,166 @@ +""" +AMAK: 20050515: This module is the test_select.py from cpython 2.4, ported to jython + unittest +""" + +try: + object +except NameError: + class object: pass + +import socket, select + +import os +import sys +import unittest + +class SelectWrapper: + + def __init__(self): + self.read_fds = [] + self.write_fds = [] + self.oob_fds = [] + self.timeout = None + + def add_read_fd(self, fd): + self.read_fds.append(fd) + + def add_write_fd(self, fd): + self.write_fds.append(fd) + + def add_oob_fd(self, fd): + self.oob_fds.append(fd) + + def set_timeout(self, timeout): + self.timeout = timeout + +class PollWrapper: + + def __init__(self): + self.timeout = None + self.poll_object = select.poll() + + def add_read_fd(self, fd): + self.poll_object.register(fd, select.POLL_IN) + + def add_write_fd(self, fd): + self.poll_object.register(fd, select.POLL_OUT) + + def add_oob_fd(self, fd): + self.poll_object.register(fd, select.POLL_PRI) + +class TestSelectInvalidParameters(unittest.TestCase): + + def testBadSelectSetTypes(self): + # Test some known error conditions + for bad_select_set in [None, 1,]: + for pos in range(2): # OOB not supported on Java + args = [[], [], []] + args[pos] = bad_select_set + try: + timeout = 0 # Can't wait forever + rfd, wfd, xfd = select.select(args[0], args[1], args[2], timeout) + except TypeError: + pass + else: + self.fail("Selecting on '%s' should have raised TypeError" % str(bad_select_set)) + + def testBadSelectableTypes(self): + class Nope: pass + + class Almost1: + def fileno(self): + return 'fileno' + + class Almost2: + def fileno(self): + return 'fileno' + + # Test some 
known error conditions + for bad_selectable in [None, 1, object(), Nope(), Almost1(), Almost2()]: + try: + timeout = 0 # Can't wait forever + rfd, wfd, xfd = select.select([bad_selectable], [], [], timeout) + except (TypeError, select.error), x: + pass + else: + self.fail("Selecting on '%s' should have raised TypeError or select.error" % str(bad_selectable)) + + def testInvalidTimeoutTypes(self): + for invalid_timeout in ['not a number']: + try: + rfd, wfd, xfd = select.select([], [], [], invalid_timeout) + except TypeError: + pass + else: + self.fail("Invalid timeout value '%s' should have raised TypeError" % invalid_timeout) + + def testInvalidTimeoutValues(self): + for invalid_timeout in [-1]: + try: + rfd, wfd, xfd = select.select([], [], [], invalid_timeout) + except (ValueError, select.error): + pass + else: + self.fail("Invalid timeout value '%s' should have raised ValueError or select.error" % invalid_timeout) + +class TestSelectClientSocket(unittest.TestCase): + + def testUnopenedSocket(self): + # This one passes on cpython + # But fails on jython, because of the deferred creation of impl sockets + if sys.platform[:4] == 'java': return + sockets = [socket.socket(socket.AF_INET, socket.SOCK_STREAM) for x in range(5)] + for pos in range(2): # OOB not supported on Java + args = [[], [], []] + args[pos] = sockets + timeout = 0 # Can't wait forever + rfd, wfd, xfd = select.select(args[0], args[1], args[2], timeout) + for s in sockets: + self.failIf(s in rfd) + self.failIf(s in wfd) + +class TestPipes(unittest.TestCase): + + verbose = 1 + + def test(self): + import sys + from test.test_support import verbose + if sys.platform[:3] in ('win', 'mac', 'os2', 'riscos'): + if verbose: + print "Can't test select easily on", sys.platform + return + cmd = 'for i in 0 1 2 3 4 5 6 7 8 9; do echo testing...; sleep 1; done' + p = os.popen(cmd, 'r') + for tout in (0, 1, 2, 4, 8, 16) + (None,)*10: + if verbose: + print 'timeout =', tout + rfd, wfd, xfd = select.select([p], [], 
[], tout) + if (rfd, wfd, xfd) == ([], [], []): + continue + if (rfd, wfd, xfd) == ([p], [], []): + line = p.readline() + if verbose: + print repr(line) + if not line: + if verbose: + print 'EOF' + break + continue + self.fail('Unexpected return values from select(): %s' % str(rfd, wfd, xfd)) + p.close() + +def test_main(): + tests = [ + TestSelectInvalidParameters, + TestSelectClientSocket, + ] + if sys.platform[:4] != 'java': + tests.append(TestPipes) + suites = [unittest.makeSuite(klass, 'test') for klass in tests] + main_suite = unittest.TestSuite(suites) + runner = unittest.TextTestRunner(verbosity=100) + runner.run(main_suite) + +if __name__ == "__main__": + test_main() Added: trunk/sandbox/kennedya/asynch_sockets/test/test_select_new.py =================================================================== --- trunk/sandbox/kennedya/asynch_sockets/test/test_select_new.py (rev 0) +++ trunk/sandbox/kennedya/asynch_sockets/test/test_select_new.py 2007-05-15 20:05:43 UTC (rev 3227) @@ -0,0 +1,276 @@ +""" +AMAK: 20050515: This module is a brand new test_select module, which gives much wider coverage. +""" + +import sys +import time +import unittest + +import socket +import select + +NOT_READY, READY = 0, 1 + +SERVER_ADDRESS = ("localhost", 54321) + +DATA_CHUNK_SIZE = 1000 ; DATA_CHUNK = "." * DATA_CHUNK_SIZE + +# +# The timing of these tests depends on the how the unerlying OS socket library +# handles buffering. 
These values may need tweaking for different platforms +# +# The fundamental problem is that there is no reliable way to fill a socket with bytes +# + +if sys.platform[:4] == 'java': + SELECT_TIMEOUT = 0 +else: + # zero select timeout fails these tests on cpython (on windows 2003 anyway) + SELECT_TIMEOUT = 0.001 + +READ_TIMEOUT = 5 + +class AsynchronousServer: + + def __init__(self): + self.server_socket = None + + def create_socket(self): + self.server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + self.server_socket.setblocking(0) + self.server_socket.bind(SERVER_ADDRESS) + self.server_socket.listen(5) + try: + self.server_socket.accept() + except socket.error: + pass + + def verify_acceptable_status(self, expected_acceptability): + actual_acceptability = NOT_READY + rfds, wfds, xfds = select.select([self.server_socket], [], [], SELECT_TIMEOUT) + if self.server_socket in rfds: + actual_acceptability = READY + assert actual_acceptability == expected_acceptability, "Server socket should %sbe acceptable" % {NOT_READY:'not ',READY:''}[expected_acceptability] + + def accept_connection(self): + rfds, wfds, xfds = select.select([self.server_socket], [], [], SELECT_TIMEOUT) + assert self.server_socket in rfds, "Server socket had no pending connections" + new_socket, address = self.server_socket.accept() + return AsynchronousHandler(new_socket) + + def close(self): + self.server_socket.close() + +class PeerImpl: + + def fill_outchannel(self): + """ + This implementation is sub-optimal. + It is reliant on how the OS handles the socket buffers. 
+ """ + total_bytes = 0 + while 1: + try: + rfds, wfds, xfds = select.select([], [self.socket], [], SELECT_TIMEOUT) + if self.socket in wfds: + bytes_sent = self.socket.send(DATA_CHUNK) + total_bytes += bytes_sent + else: + return total_bytes + except socket.error, se: + if se.value == 10035: + continue + raise se + + def read_inchannel(self, expected): + buf_size = expected ; results = "" ; start = time.time() + while 1: + if (expected - len(results)) < buf_size: + buf_size = expected - len(results) + rfds, wfds, xfds = select.select([self.socket], [], [], SELECT_TIMEOUT) + if self.socket in rfds: + recvd_bytes = self.socket.recv(buf_size) + if len(recvd_bytes): + results += recvd_bytes + if len(results) == expected: + return results + else: + stop = time.time() + if (stop - start) > READ_TIMEOUT: + raise Exception("Exceeded alloted time (%1.3lf > %1.3lf) to read %d bytes: got %d" % ((stop-start), READ_TIMEOUT, expected, len(results))) + + def verify_status(self, expected_readability, expected_writability): + actual_readability, actual_writability = NOT_READY, NOT_READY + rfds, wfds, xfds = select.select([self.socket], [self.socket], [], SELECT_TIMEOUT) + if self.socket in rfds: + actual_readability = READY + if self.socket in wfds: + actual_writability = READY + assert actual_readability == expected_readability, "Socket should %sbe ready for reading: %s" % ({NOT_READY:'not ',READY:''}[expected_readability], rfds) + assert actual_writability == expected_writability, "Socket should %sbe ready for writing: %s" % ({NOT_READY:'not ',READY:''}[expected_writability], wfds) + + def fileno(self): + return self.socket.fileno() + + def close(self): + self.socket.close() + +class AsynchronousHandler(PeerImpl): + + def __init__(self, new_socket): + self.socket = new_socket + self.socket.setblocking(0) + +class AsynchronousClient(PeerImpl): + + def __init__(self): + self.socket = None + self.connected = 0 + + def create_socket(self): + self.socket = 
socket.socket(socket.AF_INET, socket.SOCK_STREAM) + self.socket.setblocking(0) + + def start_connect(self): + result = self.socket.connect_ex(SERVER_ADDRESS) + if result == 0: + self.connected = 1 + + def finish_connect(self): + if self.connected: + return + rfds, wfds, xfds = select.select([], [self.socket], [], SELECT_TIMEOUT) + assert self.socket in wfds, "Client socket incomplete connect" + +def log(message): + print message + +class TestSelect(unittest.TestCase): + + def test000_CreateSockets(self): + # Create the server + TestSelect.server_socket = AsynchronousServer() + TestSelect.server_socket.create_socket() + + # Create the client + TestSelect.client_socket = AsynchronousClient() + TestSelect.client_socket.create_socket() + + def test100_ServerSocketNoPendingConnections(self): + # Check the server is not marked "acceptable" + TestSelect.server_socket.verify_acceptable_status(NOT_READY) + + def test110_ServerSocketPendingConnections(self): + # Start the client connection process + TestSelect.client_socket.start_connect() + # Check the server is now acceptable + TestSelect.server_socket.verify_acceptable_status(READY) + + def test120_ServerSocketNoPendingConnection(self): + TestSelect.handler_socket = TestSelect.server_socket.accept_connection() + TestSelect.server_socket.verify_acceptable_status(NOT_READY) + + def test130_EmptyChannel(self): + # Finish the connection + TestSelect.client_socket.finish_connect() + + # + # Test the client-out -> handler-in channel on its own + # + + def test200_EmptyChannel(self): + # And now test the status of both end of the socket + TestSelect.client_socket.verify_status(NOT_READY, READY) + TestSelect.handler_socket.verify_status(NOT_READY, READY) + + def test210_FullChannel(self): + TestSelect.num_bytes_outstanding = TestSelect.client_socket.fill_outchannel() + TestSelect.client_socket.verify_status(NOT_READY, NOT_READY) + TestSelect.handler_socket.verify_status(READY, READY) + + def test220_PartiallyFullChannel(self): + 
# Half empty the channel + num_bytes_to_retrieve = TestSelect.num_bytes_outstanding / 2 + bytes_retrieved = TestSelect.handler_socket.read_inchannel(num_bytes_to_retrieve) + TestSelect.num_bytes_outstanding -= len(bytes_retrieved) + TestSelect.client_socket.verify_status(NOT_READY, READY) + TestSelect.handler_socket.verify_status(READY, READY) + + def test230_EmptyChannel(self): + # Empty the channel + bytes_retrieved = TestSelect.handler_socket.read_inchannel(TestSelect.num_bytes_outstanding) + TestSelect.client_socket.verify_status(NOT_READY, READY) + TestSelect.handler_socket.verify_status(NOT_READY, READY) + + # + # Test the handler-out -> client-in channel on its own + # + + def test300_EmptyChannel(self): + # And now test the status of both end of the socket + TestSelect.client_socket.verify_status(NOT_READY, READY) + TestSelect.handler_socket.verify_status(NOT_READY, READY) + + def test310_FullChannel(self): + TestSelect.num_bytes_outstanding = TestSelect.handler_socket.fill_outchannel() + TestSelect.client_socket.verify_status(READY, READY) + TestSelect.handler_socket.verify_status(NOT_READY, NOT_READY) + + def test320_PartiallyFullChannel(self): + # Half empty the channel + num_bytes_to_retrieve = TestSelect.num_bytes_outstanding / 2 + bytes_retrieved = TestSelect.client_socket.read_inchannel(num_bytes_to_retrieve) + TestSelect.num_bytes_outstanding -= len(bytes_retrieved) + TestSelect.client_socket.verify_status(READY, READY) + TestSelect.handler_socket.verify_status(NOT_READY, READY) + + def test330_EmptyChannel(self): + # Empty the channel + TestSelect.client_socket.read_inchannel(TestSelect.num_bytes_outstanding) + TestSelect.client_socket.verify_status(NOT_READY, READY) + TestSelect.handler_socket.verify_status(NOT_READY, READY) + + # + # Test both channels active at the same time + # + + def test400_EmptyChannels(self): + # And now test the status of both end of the socket + TestSelect.client_socket.verify_status(NOT_READY, READY) + 
TestSelect.handler_socket.verify_status(NOT_READY, READY) + + def test410_FullChannels(self): + TestSelect.num_bytes_outstanding_c = TestSelect.client_socket.fill_outchannel() + TestSelect.num_bytes_outstanding_h = TestSelect.handler_socket.fill_outchannel() + TestSelect.client_socket.verify_status(READY, NOT_READY) + TestSelect.handler_socket.verify_status(READY, NOT_READY) + + def est420_PartiallyFullChannels(self): + # Half empty the channel + num_bytes_to_retrieve_c = TestSelect.num_bytes_outstanding_c / 2 + num_bytes_to_retrieve_h = TestSelect.num_bytes_outstanding_h / 2 + bytes_retrieved_c = TestSelect.client_socket.read_inchannel(num_bytes_to_retrieve_c) + bytes_retrieved_h = TestSelect.handler_socket.read_inchannel(num_bytes_to_retrieve_h) + TestSelect.num_bytes_outstanding_c -= len(bytes_retrieved_c) + TestSelect.num_bytes_outstanding_h -= len(bytes_retrieved_h) + TestSelect.client_socket.verify_status(READY, READY) + TestSelect.handler_socket.verify_status(READY, READY) + + def test430_EmptyChannels(self): + # Empty the channel + TestSelect.client_socket.read_inchannel(TestSelect.num_bytes_outstanding_c) + TestSelect.handler_socket.read_inchannel(TestSelect.num_bytes_outstanding_h) + TestSelect.client_socket.verify_status(NOT_READY, READY) + TestSelect.handler_socket.verify_status(NOT_READY, READY) + + # + # Now close the whole lot down + # + + def test99999_CloseSockets(self): + TestSelect.client_socket.close() + TestSelect.handler_socket.close() + TestSelect.server_socket.close() + +if __name__ == "__main__": + unittest.main() Added: trunk/sandbox/kennedya/asynch_sockets/test/test_socket.py =================================================================== --- trunk/sandbox/kennedya/asynch_sockets/test/test_socket.py (rev 0) +++ trunk/sandbox/kennedya/asynch_sockets/test/test_socket.py 2007-05-15 20:05:43 UTC (rev 3227) @@ -0,0 +1,960 @@ +from __future__ import nested_scopes + +""" +AMAK: 20050515: This module is the test_socket.py from cpython 2.4, 
ported to jython. +""" + +import unittest +#from test import test_support + +import socket +import select +import time +import thread, threading +import Queue +import sys +from weakref import proxy + +PORT = 50007 +HOST = 'localhost' +MSG = 'Michael Gilfix was here\n' + +try: + True +except NameError: + True, False = 1, 0 + +class SocketTCPTest(unittest.TestCase): + + def setUp(self): + self.serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + self.serv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + self.serv.bind((HOST, PORT)) + self.serv.listen(1) + + def tearDown(self): + self.serv.close() + self.serv = None + +class SocketUDPTest(unittest.TestCase): + + def setUp(self): + self.serv = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + self.serv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + self.serv.bind((HOST, PORT)) + + def tearDown(self): + self.serv.close() + self.serv = None + +class ThreadableTest: + """Threadable Test class + + The ThreadableTest class makes it easy to create a threaded + client/server pair from an existing unit test. To create a + new threaded class from an existing unit test, use multiple + inheritance: + + class NewClass (OldClass, ThreadableTest): + pass + + This class defines two new fixture functions with obvious + purposes for overriding: + + clientSetUp () + clientTearDown () + + Any new test functions within the class must then define + tests in pairs, where the test name is preceeded with a + '_' to indicate the client portion of the test. Ex: + + def testFoo(self): + # Server portion + + def _testFoo(self): + # Client portion + + Any exceptions raised by the clients during their tests + are caught and transferred to the main thread to alert + the testing framework. 
+ + Note, the server setup function cannot call any blocking + functions that rely on the client thread during setup, + unless serverExplicityReady() is called just before + the blocking call (such as in setting up a client/server + connection and performing the accept() in setUp(). + """ + + def __init__(self): + # Swap the true setup function + self.__setUp = self.setUp + self.__tearDown = self.tearDown + self.setUp = self._setUp + self.tearDown = self._tearDown + + def serverExplicitReady(self): + """This method allows the server to explicitly indicate that + it wants the client thread to proceed. This is useful if the + server is about to execute a blocking routine that is + dependent upon the client thread during its setup routine.""" + self.server_ready.set() + + def _setUp(self): + self.server_ready = threading.Event() + self.client_ready = threading.Event() + self.done = threading.Event() + self.queue = Queue.Queue(1) + + # Do some munging to start the client test. + methodname = self.id() + i = methodname.rfind('.') + methodname = methodname[i+1:] + self.test_method_name = methodname + test_method = getattr(self, '_' + methodname) + self.client_thread = thread.start_new_thread( + self.clientRun, (test_method,)) + + self.__setUp() + if not self.server_ready.isSet(): + self.server_ready.set() + self.client_ready.wait() + + def _tearDown(self): + self.__tearDown() + self.done.wait() + + if not self.queue.empty(): + msg = self.queue.get() + self.fail(msg) + + def clientRun(self, test_func): + self.server_ready.wait() + self.client_ready.set() + self.clientSetUp() + if not callable(test_func): + raise TypeError, "test_func must be a callable function" + try: + test_func() + except Exception, strerror: + self.queue.put(strerror) + self.clientTearDown() + + def clientSetUp(self): + raise NotImplementedError, "clientSetUp must be implemented." 
+ + def clientTearDown(self): + self.done.set() + if sys.platform[:4] != 'java': + # This causes the whole process to exit on jython + # Probably related to problems with daemon status of threads + thread.exit() + +class ThreadedTCPSocketTest(SocketTCPTest, ThreadableTest): + + def __init__(self, methodName='runTest'): + SocketTCPTest.__init__(self, methodName=methodName) + ThreadableTest.__init__(self) + + def clientSetUp(self): + self.cli = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + + def clientTearDown(self): + self.cli.close() + self.cli = None + ThreadableTest.clientTearDown(self) + +class ThreadedUDPSocketTest(SocketUDPTest, ThreadableTest): + + def __init__(self, methodName='runTest'): + SocketUDPTest.__init__(self, methodName=methodName) + ThreadableTest.__init__(self) + + def clientSetUp(self): + self.cli = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + +class SocketConnectedTest(ThreadedTCPSocketTest): + + def __init__(self, methodName='runTest'): + ThreadedTCPSocketTest.__init__(self, methodName=methodName) + + def setUp(self): + ThreadedTCPSocketTest.setUp(self) + # Indicate explicitly we're ready for the client thread to + # proceed and then perform the blocking call to accept + self.serverExplicitReady() + conn, addr = self.serv.accept() + self.cli_conn = conn + + def tearDown(self): + self.cli_conn.close() + self.cli_conn = None + ThreadedTCPSocketTest.tearDown(self) + + def clientSetUp(self): + ThreadedTCPSocketTest.clientSetUp(self) + self.cli.connect((HOST, PORT)) + self.serv_conn = self.cli + + def clientTearDown(self): + self.serv_conn.close() + self.serv_conn = None + ThreadedTCPSocketTest.clientTearDown(self) + +class SocketPairTest(unittest.TestCase, ThreadableTest): + + def __init__(self, methodName='runTest'): + unittest.TestCase.__init__(self, methodName=methodName) + ThreadableTest.__init__(self) + + def setUp(self): + self.serv, self.cli = socket.socketpair() + + def tearDown(self): + self.serv.close() + self.serv = None + + 
def clientSetUp(self): + pass + + def clientTearDown(self): + self.cli.close() + self.cli = None + ThreadableTest.clientTearDown(self) + + +####################################################################### +## Begin Tests + +class GeneralModuleTests(unittest.TestCase): + + def test_weakref(self): + if sys.platform[:4] == 'java': return + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + p = proxy(s) + self.assertEqual(p.fileno(), s.fileno()) + s.close() + s = None + try: + p.fileno() + except ReferenceError: + pass + else: + self.fail('Socket proxy still exists') + + def testSocketError(self): + # Testing socket module exceptions + def raise_error(*args, **kwargs): + raise socket.error + def raise_herror(*args, **kwargs): + raise socket.herror + def raise_gaierror(*args, **kwargs): + raise socket.gaierror + self.failUnlessRaises(socket.error, raise_error, + "Error raising socket exception.") + self.failUnlessRaises(socket.error, raise_herror, + "Error raising socket exception.") + self.failUnlessRaises(socket.error, raise_gaierror, + "Error raising socket exception.") + + def testCrucialConstants(self): + # Testing for mission critical constants + socket.AF_INET + socket.SOCK_STREAM + socket.SOCK_DGRAM + socket.SOCK_RAW + socket.SOCK_RDM + socket.SOCK_SEQPACKET + socket.SOL_SOCKET + socket.SO_REUSEADDR + + def testHostnameRes(self): + # Testing hostname resolution mechanisms + hostname = socket.gethostname() + try: + ip = socket.gethostbyname(hostname) + except socket.error: + # Probably name lookup wasn't set up right; skip this test + self.fail("Probably name lookup wasn't set up right; skip testHostnameRes.gethostbyname") + return + self.assert_(ip.find('.') >= 0, "Error resolving host to ip.") + try: + hname, aliases, ipaddrs = socket.gethostbyaddr(ip) + except socket.error: + # Probably a similar problem as above; skip this test + self.fail("Probably name lookup wasn't set up right; skip testHostnameRes.gethostbyaddr") + return + all_host_names = 
[hostname, hname] + aliases + fqhn = socket.getfqdn() + if not fqhn in all_host_names: + self.fail("Error testing host resolution mechanisms.") + + def testRefCountGetNameInfo(self): + # Testing reference count for getnameinfo + import sys + if hasattr(sys, "getrefcount"): + try: + # On some versions, this loses a reference + orig = sys.getrefcount(__name__) + socket.getnameinfo(__name__,0) + except SystemError: + if sys.getrefcount(__name__) <> orig: + self.fail("socket.getnameinfo loses a reference") + + def testInterpreterCrash(self): + if sys.platform[:4] == 'java': return + # Making sure getnameinfo doesn't crash the interpreter + try: + # On some versions, this crashes the interpreter. + socket.getnameinfo(('x', 0, 0, 0), 0) + except socket.error: + pass + +# Need to implement binary AND for ints and longs + + def testNtoH(self): + if sys.platform[:4] == 'java': return # problems with int & long + # This just checks that htons etc. are their own inverse, + # when looking at the lower 16 or 32 bits. + sizes = {socket.htonl: 32, socket.ntohl: 32, + socket.htons: 16, socket.ntohs: 16} + for func, size in sizes.items(): + mask = (1L<<size) - 1 + for i in (0, 1, 0xffff, ~0xffff, 2, 0x01234567, 0x76543210): + self.assertEqual(i & mask, func(func(i&mask)) & mask) + + swapped = func(mask) + self.assertEqual(swapped & mask, mask) + self.assertRaises(OverflowError, func, 1L<<34) + + def testGetServBy(self): + if sys.platform[:4] == 'java': return # not implemented on java + eq = self.assertEqual + # Find one service that exists, then check all the related interfaces. + # I've ordered this by protocols that have both a tcp and udp + # protocol, at least for modern Linuxes. 
+ if sys.platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6', + 'darwin'): + # avoid the 'echo' service on this platform, as there is an + # assumption breaking non-standard port/protocol entry + services = ('daytime', 'qotd', 'domain') + else: + services = ('echo', 'daytime', 'domain') + for service in services: + try: + port = socket.getservbyname(service, 'tcp') + break + except socket.error: + pass + else: + raise socket.error + # Try same call with optional protocol omitted + port2 = socket.getservbyname(service) + eq(port, port2) + # Try udp, but don't barf it it doesn't exist + try: + udpport = socket.getservbyname(service, 'udp') + ... [truncated message content] |
From: <fwi...@us...> - 2007-11-05 23:50:41
|
Revision: 3639 http://jython.svn.sourceforge.net/jython/?rev=3639&view=rev Author: fwierzbicki Date: 2007-11-05 15:50:40 -0800 (Mon, 05 Nov 2007) Log Message: ----------- Grabbed the ast generation code from the python sandbox, added the latest spark, asdl and Python.asdl from the python trunk, and fixed a small problem in asdl_java.py. Now produces the Java ast from the latest Python.asdl. Added Paths: ----------- trunk/sandbox/ast/ trunk/sandbox/ast/Python.asdl trunk/sandbox/ast/asdl.py trunk/sandbox/ast/asdl_java.py trunk/sandbox/ast/spark.py Added: trunk/sandbox/ast/Python.asdl =================================================================== --- trunk/sandbox/ast/Python.asdl (rev 0) +++ trunk/sandbox/ast/Python.asdl 2007-11-05 23:50:40 UTC (rev 3639) @@ -0,0 +1,115 @@ +-- ASDL's five builtin types are identifier, int, string, object, bool + +module Python version "$Revision: 53731 $" +{ + mod = Module(stmt* body) + | Interactive(stmt* body) + | Expression(expr body) + + -- not really an actual node but useful in Jython's typesystem. + | Suite(stmt* body) + + stmt = FunctionDef(identifier name, arguments args, + stmt* body, expr* decorators) + | ClassDef(identifier name, expr* bases, stmt* body) + | Return(expr? value) + + | Delete(expr* targets) + | Assign(expr* targets, expr value) + | AugAssign(expr target, operator op, expr value) + + -- not sure if bool is allowed, can always use int + | Print(expr? dest, expr* values, bool nl) + + -- use 'orelse' because else is a keyword in target languages + | For(expr target, expr iter, stmt* body, stmt* orelse) + | While(expr test, stmt* body, stmt* orelse) + | If(expr test, stmt* body, stmt* orelse) + | With(expr context_expr, expr? optional_vars, stmt* body) + + -- 'type' is a bad name + | Raise(expr? type, expr? inst, expr? tback) + | TryExcept(stmt* body, excepthandler* handlers, stmt* orelse) + | TryFinally(stmt* body, stmt* finalbody) + | Assert(expr test, expr? 
msg) + + | Import(alias* names) + | ImportFrom(identifier module, alias* names, int? level) + + -- Doesn't capture requirement that locals must be + -- defined if globals is + -- still supports use as a function! + | Exec(expr body, expr? globals, expr? locals) + + | Global(identifier* names) + | Expr(expr value) + | Pass | Break | Continue + + -- XXX Jython will be different + -- col_offset is the byte offset in the utf8 string the parser uses + attributes (int lineno, int col_offset) + + -- BoolOp() can use left & right? + expr = BoolOp(boolop op, expr* values) + | BinOp(expr left, operator op, expr right) + | UnaryOp(unaryop op, expr operand) + | Lambda(arguments args, expr body) + | IfExp(expr test, expr body, expr orelse) + | Dict(expr* keys, expr* values) + | ListComp(expr elt, comprehension* generators) + | GeneratorExp(expr elt, comprehension* generators) + -- the grammar constrains where yield expressions can occur + | Yield(expr? value) + -- need sequences for compare to distinguish between + -- x < 4 < 3 and (x < 4) < 3 + | Compare(expr left, cmpop* ops, expr* comparators) + | Call(expr func, expr* args, keyword* keywords, + expr? starargs, expr? kwargs) + | Repr(expr value) + | Num(object n) -- a number as a PyObject. + | Str(string s) -- need to specify raw, unicode, etc? + -- other literals? bools? + + -- the following expression can appear in assignment context + | Attribute(expr value, identifier attr, expr_context ctx) + | Subscript(expr value, slice slice, expr_context ctx) + | Name(identifier id, expr_context ctx) + | List(expr* elts, expr_context ctx) + | Tuple(expr* elts, expr_context ctx) + + -- col_offset is the byte offset in the utf8 string the parser uses + attributes (int lineno, int col_offset) + + expr_context = Load | Store | Del | AugLoad | AugStore | Param + + slice = Ellipsis | Slice(expr? lower, expr? upper, expr? 
step) + | ExtSlice(slice* dims) + | Index(expr value) + + boolop = And | Or + + operator = Add | Sub | Mult | Div | Mod | Pow | LShift + | RShift | BitOr | BitXor | BitAnd | FloorDiv + + unaryop = Invert | Not | UAdd | USub + + cmpop = Eq | NotEq | Lt | LtE | Gt | GtE | Is | IsNot | In | NotIn + + comprehension = (expr target, expr iter, expr* ifs) + + -- not sure what to call the first argument for raise and except + -- TODO(jhylton): Figure out if there is a better way to handle + -- lineno and col_offset fields, particularly when + -- ast is exposed to Python. + excepthandler = (expr? type, expr? name, stmt* body, int lineno, + int col_offset) + + arguments = (expr* args, identifier? vararg, + identifier? kwarg, expr* defaults) + + -- keyword arguments supplied to call + keyword = (identifier arg, expr value) + + -- import name with optional 'as' alias. + alias = (identifier name, identifier? asname) +} Added: trunk/sandbox/ast/asdl.py =================================================================== --- trunk/sandbox/ast/asdl.py (rev 0) +++ trunk/sandbox/ast/asdl.py 2007-11-05 23:50:40 UTC (rev 3639) @@ -0,0 +1,415 @@ +"""An implementation of the Zephyr Abstract Syntax Definition Language. + +See http://asdl.sourceforge.net/ and +http://www.cs.princeton.edu/~danwang/Papers/dsl97/dsl97-abstract.html. + +Only supports top level module decl, not view. I'm guessing that view +is intended to support the browser and I'm not interested in the +browser. 
+ +Changes for Python: Add support for module versions +""" + +#__metaclass__ = type + +import os +import traceback + +import spark + +class Token: + # spark seems to dispatch in the parser based on a token's + # type attribute + def __init__(self, type, lineno): + self.type = type + self.lineno = lineno + + def __str__(self): + return self.type + + def __repr__(self): + return str(self) + +class Id(Token): + def __init__(self, value, lineno): + self.type = 'Id' + self.value = value + self.lineno = lineno + + def __str__(self): + return self.value + +class String(Token): + def __init__(self, value, lineno): + self.type = 'String' + self.value = value + self.lineno = lineno + +class ASDLSyntaxError: + + def __init__(self, lineno, token=None, msg=None): + self.lineno = lineno + self.token = token + self.msg = msg + + def __str__(self): + if self.msg is None: + return "Error at '%s', line %d" % (self.token, self.lineno) + else: + return "%s, line %d" % (self.msg, self.lineno) + +class ASDLScanner(spark.GenericScanner, object): + + def tokenize(self, input): + self.rv = [] + self.lineno = 1 + super(ASDLScanner, self).tokenize(input) + return self.rv + + def t_id(self, s): + r"[\w\.]+" + # XXX doesn't distinguish upper vs. lower, which is + # significant for ASDL. + self.rv.append(Id(s, self.lineno)) + + def t_string(self, s): + r'"[^"]*"' + self.rv.append(String(s, self.lineno)) + + def t_xxx(self, s): # not sure what this production means + r"<=" + self.rv.append(Token(s, self.lineno)) + + def t_punctuation(self, s): + r"[\{\}\*\=\|\(\)\,\?\:]" + self.rv.append(Token(s, self.lineno)) + + def t_comment(self, s): + r"\-\-[^\n]*" + pass + + def t_newline(self, s): + r"\n" + self.lineno += 1 + + def t_whitespace(self, s): + r"[ \t]+" + pass + + def t_default(self, s): + r" . 
+" + raise ValueError, "unmatched input: %s" % `s` + +class ASDLParser(spark.GenericParser, object): + def __init__(self): + super(ASDLParser, self).__init__("module") + + def typestring(self, tok): + return tok.type + + def error(self, tok): + raise ASDLSyntaxError(tok.lineno, tok) + + def p_module_0(self, (module, name, version, _0, _1)): + " module ::= Id Id version { } " + if module.value != "module": + raise ASDLSyntaxError(module.lineno, + msg="expected 'module', found %s" % module) + return Module(name, None, version) + + def p_module(self, (module, name, version, _0, definitions, _1)): + " module ::= Id Id version { definitions } " + if module.value != "module": + raise ASDLSyntaxError(module.lineno, + msg="expected 'module', found %s" % module) + return Module(name, definitions, version) + + def p_version(self, (version, V)): + "version ::= Id String" + if version.value != "version": + raise ASDLSyntaxError(version.lineno, + msg="expected 'version', found %" % version) + return V + + def p_definition_0(self, (definition,)): + " definitions ::= definition " + return definition + + def p_definition_1(self, (definitions, definition)): + " definitions ::= definition definitions " + return definitions + definition + + def p_definition(self, (id, _, type)): + " definition ::= Id = type " + return [Type(id, type)] + + def p_type_0(self, (product,)): + " type ::= product " + return product + + def p_type_1(self, (sum,)): + " type ::= sum " + return Sum(sum) + + def p_type_2(self, (sum, id, _0, attributes, _1)): + " type ::= sum Id ( fields ) " + if id.value != "attributes": + raise ASDLSyntaxError(id.lineno, + msg="expected attributes, found %s" % id) + if attributes: + attributes.reverse() + return Sum(sum, attributes) + + def p_product(self, (_0, fields, _1)): + " product ::= ( fields ) " + # XXX can't I just construct things in the right order? 
+ fields.reverse() + return Product(fields) + + def p_sum_0(self, (constructor,)): + " sum ::= constructor """ + return [constructor] + + def p_sum_1(self, (constructor, _, sum)): + " sum ::= constructor | sum " + return [constructor] + sum + + def p_sum_2(self, (constructor, _, sum)): + " sum ::= constructor | sum " + return [constructor] + sum + + def p_constructor_0(self, (id,)): + " constructor ::= Id " + return Constructor(id) + + def p_constructor_1(self, (id, _0, fields, _1)): + " constructor ::= Id ( fields ) " + # XXX can't I just construct things in the right order? + fields.reverse() + return Constructor(id, fields) + + def p_fields_0(self, (field,)): + " fields ::= field " + return [field] + + def p_fields_1(self, (field, _, fields)): + " fields ::= field , fields " + return fields + [field] + + def p_field_0(self, (type,)): + " field ::= Id " + return Field(type) + + def p_field_1(self, (type, name)): + " field ::= Id Id " + return Field(type, name) + + def p_field_2(self, (type, _, name)): + " field ::= Id * Id " + return Field(type, name, seq=1) + + def p_field_3(self, (type, _, name)): + " field ::= Id ? Id " + return Field(type, name, opt=1) + + def p_field_4(self, (type, _)): + " field ::= Id * " + return Field(type, seq=1) + + def p_field_5(self, (type, _)): + " field ::= Id ? 
" + return Field(type, opt=1) + +builtin_types = ("identifier", "string", "int", "bool", "object") + +# below is a collection of classes to capture the AST of an AST :-) +# not sure if any of the methods are useful yet, but I'm adding them +# piecemeal as they seem helpful + +class AST: + pass # a marker class + +class Module(AST): + def __init__(self, name, dfns, version): + self.name = name + self.dfns = dfns + self.version = version + self.types = {} # maps type name to value (from dfns) + for type in dfns: + self.types[type.name.value] = type.value + + def __repr__(self): + return "Module(%s, %s)" % (self.name, self.dfns) + +class Type(AST): + def __init__(self, name, value): + self.name = name + self.value = value + + def __repr__(self): + return "Type(%s, %s)" % (self.name, self.value) + +class Constructor(AST): + def __init__(self, name, fields=None): + self.name = name + self.fields = fields or [] + + def __repr__(self): + return "Constructor(%s, %s)" % (self.name, self.fields) + +class Field(AST): + def __init__(self, type, name=None, seq=0, opt=0): + self.type = type + self.name = name + self.seq = seq + self.opt = opt + + def __repr__(self): + if self.seq: + extra = ", seq=1" + elif self.opt: + extra = ", opt=1" + else: + extra = "" + if self.name is None: + return "Field(%s%s)" % (self.type, extra) + else: + return "Field(%s, %s%s)" % (self.type, self.name, extra) + +class Sum(AST): + def __init__(self, types, attributes=None): + self.types = types + self.attributes = attributes or [] + + def __repr__(self): + if self.attributes is None: + return "Sum(%s)" % self.types + else: + return "Sum(%s, %s)" % (self.types, self.attributes) + +class Product(AST): + def __init__(self, fields): + self.fields = fields + + def __repr__(self): + return "Product(%s)" % self.fields + +class VisitorBase(object): + + def __init__(self, skip=0): + self.cache = {} + self.skip = skip + + def visit(self, object, *args): + meth = self._dispatch(object) + if meth is None: + 
return + try: + meth(object, *args) + except Exception, err: + print "Error visiting", repr(object) + print err + traceback.print_exc() + # XXX hack + if hasattr(self, 'file'): + self.file.flush() + os._exit(1) + + def _dispatch(self, object): + assert isinstance(object, AST), repr(object) + klass = object.__class__ + meth = self.cache.get(klass) + if meth is None: + methname = "visit" + klass.__name__ + if self.skip: + meth = getattr(self, methname, None) + else: + meth = getattr(self, methname) + self.cache[klass] = meth + return meth + +class Check(VisitorBase): + + def __init__(self): + super(Check, self).__init__(skip=1) + self.cons = {} + self.errors = 0 + self.types = {} + + def visitModule(self, mod): + for dfn in mod.dfns: + self.visit(dfn) + + def visitType(self, type): + self.visit(type.value, str(type.name)) + + def visitSum(self, sum, name): + for t in sum.types: + self.visit(t, name) + + def visitConstructor(self, cons, name): + key = str(cons.name) + conflict = self.cons.get(key) + if conflict is None: + self.cons[key] = name + else: + print "Redefinition of constructor %s" % key + print "Defined in %s and %s" % (conflict, name) + self.errors += 1 + for f in cons.fields: + self.visit(f, key) + + def visitField(self, field, name): + key = str(field.type) + l = self.types.setdefault(key, []) + l.append(name) + + def visitProduct(self, prod, name): + for f in prod.fields: + self.visit(f, name) + +def check(mod): + v = Check() + v.visit(mod) + + for t in v.types: + if not mod.types.has_key(t) and not t in builtin_types: + v.errors += 1 + uses = ", ".join(v.types[t]) + print "Undefined type %s, used in %s" % (t, uses) + + return not v.errors + +def parse(file): + scanner = ASDLScanner() + parser = ASDLParser() + + buf = open(file).read() + tokens = scanner.tokenize(buf) + try: + return parser.parse(tokens) + except ASDLSyntaxError, err: + print err + lines = buf.split("\n") + print lines[err.lineno - 1] # lines starts at 0, files at 1 + +if __name__ == 
"__main__": + import glob + import sys + + if len(sys.argv) > 1: + files = sys.argv[1:] + else: + testdir = "tests" + files = glob.glob(testdir + "/*.asdl") + + for file in files: + print file + mod = parse(file) + print "module", mod.name + print len(mod.dfns), "definitions" + if not check(mod): + print "Check failed" + else: + for dfn in mod.dfns: + print dfn.type Added: trunk/sandbox/ast/asdl_java.py =================================================================== --- trunk/sandbox/ast/asdl_java.py (rev 0) +++ trunk/sandbox/ast/asdl_java.py 2007-11-05 23:50:40 UTC (rev 3639) @@ -0,0 +1,360 @@ +"""Generate Java code from an ASDL description.""" + +# TO DO +# handle fields that have a type but no name + +import os, sys, traceback + +import asdl + +TABSIZE = 4 +MAX_COL = 76 + +def reflow_lines(s, depth): + """Reflow the line s indented depth tabs. + + Return a sequence of lines where no line extends beyond MAX_COL + when properly indented. The first line is properly indented based + exclusively on depth * TABSIZE. All following lines -- these are + the reflowed lines generated by this function -- start at the same + column as the first character beyond the opening { in the first + line. 
+ """ + size = MAX_COL - depth * TABSIZE + if len(s) < size: + return [s] + + lines = [] + cur = s + padding = "" + while len(cur) > size: + i = cur.rfind(' ', 0, size) + assert i != -1, "Impossible line to reflow: %s" % `s` + lines.append(padding + cur[:i]) + if len(lines) == 1: + # find new size based on brace + j = cur.find('{', 0, i) + if j >= 0: + j += 2 # account for the brace and the space after it + size -= j + padding = " " * j + cur = cur[i+1:] + else: + lines.append(padding + cur) + return lines + +class EmitVisitor(asdl.VisitorBase): + """Visit that emits lines""" + + def __init__(self): + super(EmitVisitor, self).__init__() + + def open(self, name, refersToSimpleNode=1, useDataOutput=0): + self.file = open("%s.java" % name, "w") + print >> self.file, "// Autogenerated AST node" + print >> self.file, 'package org.python.parser.ast;' + if refersToSimpleNode: + print >> self.file, 'import org.python.parser.SimpleNode;' + if useDataOutput: + print >> self.file, 'import java.io.DataOutputStream;' + print >> self.file, 'import java.io.IOException;' + print >> self.file + + def close(self): + self.file.close() + + def emit(self, s, depth): + # XXX reflow long lines? + lines = reflow_lines(s, depth) + for line in lines: + line = (" " * TABSIZE * depth) + line + "\n" + self.file.write(line) + + + +# This step will add a 'simple' boolean attribute to all Sum and Product +# nodes and add a 'typedef' link to each Field node that points to the +# Sum or Product node that defines the field. 
+ +class AnalyzeVisitor(EmitVisitor): + index = 0 + def makeIndex(self): + self.index += 1 + return self.index + + def visitModule(self, mod): + self.types = {} + for dfn in mod.dfns: + self.types[str(dfn.name)] = dfn.value + for dfn in mod.dfns: + self.visit(dfn) + + def visitType(self, type, depth=0): + self.visit(type.value, type.name, depth) + + def visitSum(self, sum, name, depth): + sum.simple = 1 + for t in sum.types: + if t.fields: + sum.simple = 0 + break + for t in sum.types: + if not sum.simple: + t.index = self.makeIndex() + self.visit(t, name, depth) + + def visitProduct(self, product, name, depth): + product.simple = 0 + product.index = self.makeIndex() + for f in product.fields: + self.visit(f, depth + 1) + + def visitConstructor(self, cons, name, depth): + for f in cons.fields: + self.visit(f, depth + 1) + + def visitField(self, field, depth): + field.typedef = self.types.get(str(field.type)) + + + +# The code generator itself. +# +class JavaVisitor(EmitVisitor): + def visitModule(self, mod): + for dfn in mod.dfns: + self.visit(dfn) + + def visitType(self, type, depth=0): + self.visit(type.value, type.name, depth) + + def visitSum(self, sum, name, depth): + if sum.simple: + self.simple_sum(sum, name, depth) + else: + self.sum_with_constructor(sum, name, depth) + + def simple_sum(self, sum, name, depth): + self.open("%sType" % name, refersToSimpleNode=0) + self.emit("public interface %(name)sType {" % locals(), depth) + for i in range(len(sum.types)): + type = sum.types[i] + self.emit("public static final int %s = %d;" % (type.name, i+1), + depth + 1) + self.emit("", 0) + self.emit("public static final String[] %sTypeNames = new String[] {" % + name, depth+1) + self.emit('"<undef>",', depth+2) + for type in sum.types: + self.emit('"%s",' % type.name, depth+2) + self.emit("};", depth+1) + self.emit("}", depth) + self.close() + + def sum_with_constructor(self, sum, name, depth): + self.open("%sType" % name) + self.emit("public abstract class 
%(name)sType extends SimpleNode {" % + locals(), depth) + self.emit("}", depth) + self.close() + for t in sum.types: + self.visit(t, name, depth) + + def visitProduct(self, product, name, depth): + self.open("%sType" % name, useDataOutput=1) + self.emit("public class %(name)sType extends SimpleNode {" % locals(), depth) + for f in product.fields: + self.visit(f, depth + 1) + self.emit("", depth) + + self.javaMethods(product, name, "%sType" % name, product.fields, + depth+1) + + self.emit("}", depth) + self.close() + + def visitConstructor(self, cons, name, depth): + self.open(cons.name, useDataOutput=1) + enums = [] + for f in cons.fields: + if f.typedef and f.typedef.simple: + enums.append("%sType" % f.type) + if enums: + s = "implements %s " % ", ".join(enums) + else: + s = "" + self.emit("public class %s extends %sType %s{" % + (cons.name, name, s), depth) + for f in cons.fields: + self.visit(f, depth + 1) + self.emit("", depth) + + self.javaMethods(cons, cons.name, cons.name, cons.fields, depth+1) + + self.emit("}", depth) + self.close() + + def javaMethods(self, type, clsname, ctorname, fields, depth): + # The java ctors + fpargs = ", ".join([self.fieldDef(f) for f in fields]) + + self.emit("public %s(%s) {" % (ctorname, fpargs), depth) + for f in fields: + self.emit("this.%s = %s;" % (f.name, f.name), depth+1) + self.emit("}", depth) + self.emit("", 0) + + if fpargs: + fpargs += ", " + self.emit("public %s(%sSimpleNode parent) {" % (ctorname, fpargs), depth) + self.emit("this(%s);" % + ", ".join([str(f.name) for f in fields]), depth+1) + self.emit("this.beginLine = parent.beginLine;", depth+1); + self.emit("this.beginColumn = parent.beginColumn;", depth+1); + self.emit("}", depth) + self.emit("", 0) + + # The toString() method + self.emit("public String toString() {", depth) + self.emit('StringBuffer sb = new StringBuffer("%s[");' % clsname, + depth+1) + for f in fields: + self.emit('sb.append("%s=");' % f.name, depth+1) + if not 
self.bltinnames.has_key(str(f.type)) and f.typedef.simple: + self.emit("sb.append(dumpThis(this.%s, %sType.%sTypeNames));" % + (f.name, f.type, f.type), depth+1) + else: + self.emit("sb.append(dumpThis(this.%s));" % f.name, depth+1) + if f != fields[-1]: + self.emit('sb.append(", ");', depth+1) + self.emit('sb.append("]");', depth+1) + self.emit("return sb.toString();", depth+1) + self.emit("}", depth) + self.emit("", 0) + + # The pickle() method + self.emit("public void pickle(DataOutputStream ostream) throws IOException {", depth) + self.emit("pickleThis(%s, ostream);" % type.index, depth+1); + for f in fields: + self.emit("pickleThis(this.%s, ostream);" % f.name, depth+1) + self.emit("}", depth) + self.emit("", 0) + + # The accept() method + self.emit("public Object accept(VisitorIF visitor) throws Exception {", depth) + if clsname == ctorname: + self.emit('return visitor.visit%s(this);' % clsname, depth+1) + else: + self.emit('traverse(visitor);' % clsname, depth+1) + self.emit('return null;' % clsname, depth+1) + self.emit("}", depth) + self.emit("", 0) + + # The visitChildren() method + self.emit("public void traverse(VisitorIF visitor) throws Exception {", depth) + for f in fields: + if self.bltinnames.has_key(str(f.type)): + continue + if f.typedef.simple: + continue + if f.seq: + self.emit('if (%s != null) {' % f.name, depth+1) + self.emit('for (int i = 0; i < %s.length; i++) {' % f.name, + depth+2) + self.emit('if (%s[i] != null)' % f.name, depth+3) + self.emit('%s[i].accept(visitor);' % f.name, depth+4) + self.emit('}', depth+2) + self.emit('}', depth+1) + else: + self.emit('if (%s != null)' % f.name, depth+1) + self.emit('%s.accept(visitor);' % f.name, depth+2) + self.emit('}', depth) + self.emit("", 0) + + def visitField(self, field, depth): + self.emit("public %s;" % self.fieldDef(field), depth) + + bltinnames = { + 'int' : 'int', + 'bool' : 'boolean', + 'identifier' : 'String', + 'string' : 'String', + 'object' : 'Object', # was PyObject + } + + def 
fieldDef(self, field): + jtype = str(field.type) + if field.typedef and field.typedef.simple: + jtype = 'int' + else: + jtype = self.bltinnames.get(jtype, jtype + 'Type') + name = field.name + seq = field.seq and "[]" or "" + return "%(jtype)s%(seq)s %(name)s" % locals() + + +class VisitorVisitor(EmitVisitor): + def __init__(self): + EmitVisitor.__init__(self) + self.ctors = [] + + + def visitModule(self, mod): + for dfn in mod.dfns: + self.visit(dfn) + self.open("VisitorIF", refersToSimpleNode=0) + self.emit('public interface VisitorIF {', 0) + for ctor in self.ctors: + self.emit("public Object visit%s(%s node) throws Exception;" % + (ctor, ctor), 1) + self.emit('}', 0) + self.close() + + self.open("VisitorBase") + self.emit('public abstract class VisitorBase implements VisitorIF {', 0) + for ctor in self.ctors: + self.emit("public Object visit%s(%s node) throws Exception {" % + (ctor, ctor), 1) + self.emit("Object ret = unhandled_node(node);", 2) + self.emit("traverse(node);", 2) + self.emit("return ret;", 2) + self.emit('}', 1) + self.emit('', 0) + + self.emit("abstract protected Object unhandled_node(SimpleNode node) throws Exception;", 1) + self.emit("abstract public void traverse(SimpleNode node) throws Exception;", 1) + self.emit('}', 0) + self.close() + + def visitType(self, type, depth=1): + self.visit(type.value, type.name, depth) + + def visitSum(self, sum, name, depth): + if not sum.simple: + for t in sum.types: + self.visit(t, name, depth) + + def visitProduct(self, product, name, depth): + pass + + def visitConstructor(self, cons, name, depth): + self.ctors.append(cons.name) + + + +class ChainOfVisitors: + def __init__(self, *visitors): + self.visitors = visitors + + def visit(self, object): + for v in self.visitors: + v.visit(object) + +if __name__ == "__main__": + mod = asdl.parse(sys.argv[1]) + if not asdl.check(mod): + sys.exit(1) + c = ChainOfVisitors(AnalyzeVisitor(), + JavaVisitor(), + VisitorVisitor()) + c.visit(mod) Added: 
trunk/sandbox/ast/spark.py =================================================================== --- trunk/sandbox/ast/spark.py (rev 0) +++ trunk/sandbox/ast/spark.py 2007-11-05 23:50:40 UTC (rev 3639) @@ -0,0 +1,840 @@ +# Copyright (c) 1998-2002 John Aycock +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +__version__ = 'SPARK-0.7 (pre-alpha-5)' + +import re +import sys +import string + +def _namelist(instance): + namelist, namedict, classlist = [], {}, [instance.__class__] + for c in classlist: + for b in c.__bases__: + classlist.append(b) + for name in c.__dict__.keys(): + if not namedict.has_key(name): + namelist.append(name) + namedict[name] = 1 + return namelist + +class GenericScanner: + def __init__(self, flags=0): + pattern = self.reflect() + self.re = re.compile(pattern, re.VERBOSE|flags) + + self.index2func = {} + for name, number in self.re.groupindex.items(): + self.index2func[number-1] = getattr(self, 't_' + name) + + def makeRE(self, name): + doc = getattr(self, name).__doc__ + rv = '(?P<%s>%s)' % (name[2:], doc) + return rv + + def reflect(self): + rv = [] + for name in _namelist(self): + if name[:2] == 't_' and name != 't_default': + rv.append(self.makeRE(name)) + + rv.append(self.makeRE('t_default')) + return string.join(rv, '|') + + def error(self, s, pos): + print "Lexical error at position %s" % pos + raise SystemExit + + def tokenize(self, s): + pos = 0 + n = len(s) + while pos < n: + m = self.re.match(s, pos) + if m is None: + self.error(s, pos) + + groups = m.groups() + for i in range(len(groups)): + if groups[i] and self.index2func.has_key(i): + self.index2func[i](groups[i]) + pos = m.end() + + def t_default(self, s): + r'( . | \n )+' + print "Specification error: unmatched input" + raise SystemExit + +# +# Extracted from GenericParser and made global so that [un]picking works. +# +class _State: + def __init__(self, stateno, items): + self.T, self.complete, self.items = [], [], items + self.stateno = stateno + +class GenericParser: + # + # An Earley parser, as per J. Earley, "An Efficient Context-Free + # Parsing Algorithm", CACM 13(2), pp. 94-102. Also J. C. Earley, + # "An Efficient Context-Free Parsing Algorithm", Ph.D. thesis, + # Carnegie-Mellon University, August 1968. New formulation of + # the parser according to J. 
Aycock, "Practical Earley Parsing + # and the SPARK Toolkit", Ph.D. thesis, University of Victoria, + # 2001, and J. Aycock and R. N. Horspool, "Practical Earley + # Parsing", unpublished paper, 2001. + # + + def __init__(self, start): + self.rules = {} + self.rule2func = {} + self.rule2name = {} + self.collectRules() + self.augment(start) + self.ruleschanged = 1 + + _NULLABLE = '\e_' + _START = 'START' + _BOF = '|-' + + # + # When pickling, take the time to generate the full state machine; + # some information is then extraneous, too. Unfortunately we + # can't save the rule2func map. + # + def __getstate__(self): + if self.ruleschanged: + # + # XXX - duplicated from parse() + # + self.computeNull() + self.newrules = {} + self.new2old = {} + self.makeNewRules() + self.ruleschanged = 0 + self.edges, self.cores = {}, {} + self.states = { 0: self.makeState0() } + self.makeState(0, self._BOF) + # + # XXX - should find a better way to do this.. + # + changes = 1 + while changes: + changes = 0 + for k, v in self.edges.items(): + if v is None: + state, sym = k + if self.states.has_key(state): + self.goto(state, sym) + changes = 1 + rv = self.__dict__.copy() + for s in self.states.values(): + del s.items + del rv['rule2func'] + del rv['nullable'] + del rv['cores'] + return rv + + def __setstate__(self, D): + self.rules = {} + self.rule2func = {} + self.rule2name = {} + self.collectRules() + start = D['rules'][self._START][0][1][1] # Blech. + self.augment(start) + D['rule2func'] = self.rule2func + D['makeSet'] = self.makeSet_fast + self.__dict__ = D + + # + # A hook for GenericASTBuilder and GenericASTMatcher. Mess + # thee not with this; nor shall thee toucheth the _preprocess + # argument to addRule. 
+ # + def preprocess(self, rule, func): return rule, func + + def addRule(self, doc, func, _preprocess=1): + fn = func + rules = string.split(doc) + + index = [] + for i in range(len(rules)): + if rules[i] == '::=': + index.append(i-1) + index.append(len(rules)) + + for i in range(len(index)-1): + lhs = rules[index[i]] + rhs = rules[index[i]+2:index[i+1]] + rule = (lhs, tuple(rhs)) + + if _preprocess: + rule, fn = self.preprocess(rule, func) + + if self.rules.has_key(lhs): + self.rules[lhs].append(rule) + else: + self.rules[lhs] = [ rule ] + self.rule2func[rule] = fn + self.rule2name[rule] = func.__name__[2:] + self.ruleschanged = 1 + + def collectRules(self): + for name in _namelist(self): + if name[:2] == 'p_': + func = getattr(self, name) + doc = func.__doc__ + self.addRule(doc, func) + + def augment(self, start): + rule = '%s ::= %s %s' % (self._START, self._BOF, start) + self.addRule(rule, lambda args: args[1], 0) + + def computeNull(self): + self.nullable = {} + tbd = [] + + for rulelist in self.rules.values(): + lhs = rulelist[0][0] + self.nullable[lhs] = 0 + for rule in rulelist: + rhs = rule[1] + if len(rhs) == 0: + self.nullable[lhs] = 1 + continue + # + # We only need to consider rules which + # consist entirely of nonterminal symbols. + # This should be a savings on typical + # grammars. + # + for sym in rhs: + if not self.rules.has_key(sym): + break + else: + tbd.append(rule) + changes = 1 + while changes: + changes = 0 + for lhs, rhs in tbd: + if self.nullable[lhs]: + continue + for sym in rhs: + if not self.nullable[sym]: + break + else: + self.nullable[lhs] = 1 + changes = 1 + + def makeState0(self): + s0 = _State(0, []) + for rule in self.newrules[self._START]: + s0.items.append((rule, 0)) + return s0 + + def finalState(self, tokens): + # + # Yuck. 
+ # + if len(self.newrules[self._START]) == 2 and len(tokens) == 0: + return 1 + start = self.rules[self._START][0][1][1] + return self.goto(1, start) + + def makeNewRules(self): + worklist = [] + for rulelist in self.rules.values(): + for rule in rulelist: + worklist.append((rule, 0, 1, rule)) + + for rule, i, candidate, oldrule in worklist: + lhs, rhs = rule + n = len(rhs) + while i < n: + sym = rhs[i] + if not self.rules.has_key(sym) or \ + not self.nullable[sym]: + candidate = 0 + i = i + 1 + continue + + newrhs = list(rhs) + newrhs[i] = self._NULLABLE+sym + newrule = (lhs, tuple(newrhs)) + worklist.append((newrule, i+1, + candidate, oldrule)) + candidate = 0 + i = i + 1 + else: + if candidate: + lhs = self._NULLABLE+lhs + rule = (lhs, rhs) + if self.newrules.has_key(lhs): + self.newrules[lhs].append(rule) + else: + self.newrules[lhs] = [ rule ] + self.new2old[rule] = oldrule + + def typestring(self, token): + return None + + def error(self, token): + print "Syntax error at or near `%s' token" % token + raise SystemExit + + def parse(self, tokens): + sets = [ [(1,0), (2,0)] ] + self.links = {} + + if self.ruleschanged: + self.computeNull() + self.newrules = {} + self.new2old = {} + self.makeNewRules() + self.ruleschanged = 0 + self.edges, self.cores = {}, {} + self.states = { 0: self.makeState0() } + self.makeState(0, self._BOF) + + for i in xrange(len(tokens)): + sets.append([]) + + if sets[i] == []: + break + self.makeSet(tokens[i], sets, i) + else: + sets.append([]) + self.makeSet(None, sets, len(tokens)) + + #_dump(tokens, sets, self.states) + + finalitem = (self.finalState(tokens), 0) + if finalitem not in sets[-2]: + if len(tokens) > 0: + self.error(tokens[i-1]) + else: + self.error(None) + + return self.buildTree(self._START, finalitem, + tokens, len(sets)-2) + + def isnullable(self, sym): + # + # For symbols in G_e only. If we weren't supporting 1.5, + # could just use sym.startswith(). 
+ # + return self._NULLABLE == sym[0:len(self._NULLABLE)] + + def skip(self, (lhs, rhs), pos=0): + n = len(rhs) + while pos < n: + if not self.isnullable(rhs[pos]): + break + pos = pos + 1 + return pos + + def makeState(self, state, sym): + assert sym is not None + # + # Compute \epsilon-kernel state's core and see if + # it exists already. + # + kitems = [] + for rule, pos in self.states[state].items: + lhs, rhs = rule + if rhs[pos:pos+1] == (sym,): + kitems.append((rule, self.skip(rule, pos+1))) + core = kitems + + core.sort() + tcore = tuple(core) + if self.cores.has_key(tcore): + return self.cores[tcore] + # + # Nope, doesn't exist. Compute it and the associated + # \epsilon-nonkernel state together; we'll need it right away. + # + k = self.cores[tcore] = len(self.states) + K, NK = _State(k, kitems), _State(k+1, []) + self.states[k] = K + predicted = {} + + edges = self.edges + rules = self.newrules + for X in K, NK: + worklist = X.items + for item in worklist: + rule, pos = item + lhs, rhs = rule + if pos == len(rhs): + X.complete.append(rule) + continue + + nextSym = rhs[pos] + key = (X.stateno, nextSym) + if not rules.has_key(nextSym): + if not edges.has_key(key): + edges[key] = None + X.T.append(nextSym) + else: + edges[key] = None + if not predicted.has_key(nextSym): + predicted[nextSym] = 1 + for prule in rules[nextSym]: + ppos = self.skip(prule) + new = (prule, ppos) + NK.items.append(new) + # + # Problem: we know K needs generating, but we + # don't yet know about NK. Can't commit anything + # regarding NK to self.edges until we're sure. Should + # we delay committing on both K and NK to avoid this + # hacky code? This creates other problems.. + # + if X is K: + edges = {} + + if NK.items == []: + return k + + # + # Check for \epsilon-nonkernel's core. Unfortunately we + # need to know the entire set of predicted nonterminals + # to do this without accidentally duplicating states. 
+ # + core = predicted.keys() + core.sort() + tcore = tuple(core) + if self.cores.has_key(tcore): + self.edges[(k, None)] = self.cores[tcore] + return k + + nk = self.cores[tcore] = self.edges[(k, None)] = NK.stateno + self.edges.update(edges) + self.states[nk] = NK + return k + + def goto(self, state, sym): + key = (state, sym) + if not self.edges.has_key(key): + # + # No transitions from state on sym. + # + return None + + rv = self.edges[key] + if rv is None: + # + # Target state isn't generated yet. Remedy this. + # + rv = self.makeState(state, sym) + self.edges[key] = rv + return rv + + def gotoT(self, state, t): + return [self.goto(state, t)] + + def gotoST(self, state, st): + rv = [] + for t in self.states[state].T: + if st == t: + rv.append(self.goto(state, t)) + return rv + + def add(self, set, item, i=None, predecessor=None, causal=None): + if predecessor is None: + if item not in set: + set.append(item) + else: + key = (item, i) + if item not in set: + self.links[key] = [] + set.append(item) + self.links[key].append((predecessor, causal)) + + def makeSet(self, token, sets, i): + cur, next = sets[i], sets[i+1] + + ttype = token is not None and self.typestring(token) or None + if ttype is not None: + fn, arg = self.gotoT, ttype + else: + fn, arg = self.gotoST, token + + for item in cur: + ptr = (item, i) + state, parent = item + add = fn(state, arg) + for k in add: + if k is not None: + self.add(next, (k, parent), i+1, ptr) + nk = self.goto(k, None) + if nk is not None: + self.add(next, (nk, i+1)) + + if parent == i: + continue + + for rule in self.states[state].complete: + lhs, rhs = rule + for pitem in sets[parent]: + pstate, pparent = pitem + k = self.goto(pstate, lhs) + if k is not None: + why = (item, i, rule) + pptr = (pitem, parent) + self.add(cur, (k, pparent), + i, pptr, why) + nk = self.goto(k, None) + if nk is not None: + self.add(cur, (nk, i)) + + def makeSet_fast(self, token, sets, i): + # + # Call *only* when the entire state machine has been 
built! + # It relies on self.edges being filled in completely, and + # then duplicates and inlines code to boost speed at the + # cost of extreme ugliness. + # + cur, next = sets[i], sets[i+1] + ttype = token is not None and self.typestring(token) or None + + for item in cur: + ptr = (item, i) + state, parent = item + if ttype is not None: + k = self.edges.get((state, ttype), None) + if k is not None: + #self.add(next, (k, parent), i+1, ptr) + #INLINED --v + new = (k, parent) + key = (new, i+1) + if new not in next: + self.links[key] = [] + next.append(new) + self.links[key].append((ptr, None)) + #INLINED --^ + #nk = self.goto(k, None) + nk = self.edges.get((k, None), None) + if nk is not None: + #self.add(next, (nk, i+1)) + #INLINED --v + new = (nk, i+1) + if new not in next: + next.append(new) + #INLINED --^ + else: + add = self.gotoST(state, token) + for k in add: + if k is not None: + self.add(next, (k, parent), i+1, ptr) + #nk = self.goto(k, None) + nk = self.edges.get((k, None), None) + if nk is not None: + self.add(next, (nk, i+1)) + + if parent == i: + continue + + for rule in self.states[state].complete: + lhs, rhs = rule + for pitem in sets[parent]: + pstate, pparent = pitem + #k = self.goto(pstate, lhs) + k = self.edges.get((pstate, lhs), None) + if k is not None: + why = (item, i, rule) + pptr = (pitem, parent) + #self.add(cur, (k, pparent), + # i, pptr, why) + #INLINED --v + new = (k, pparent) + key = (new, i) + if new not in cur: + self.links[key] = [] + cur.append(new) + self.links[key].append((pptr, why)) + #INLINED --^ + #nk = self.goto(k, None) + nk = self.edges.get((k, None), None) + if nk is not None: + #self.add(cur, (nk, i)) + #INLINED --v + new = (nk, i) + if new not in cur: + cur.append(new) + #INLINED --^ + + def predecessor(self, key, causal): + for p, c in self.links[key]: + if c == causal: + return p + assert 0 + + def causal(self, key): + links = self.links[key] + if len(links) == 1: + return links[0][1] + choices = [] + rule2cause = {} 
+ for p, c in links: + rule = c[2] + choices.append(rule) + rule2cause[rule] = c + return rule2cause[self.ambiguity(choices)] + + def deriveEpsilon(self, nt): + if len(self.newrules[nt]) > 1: + rule = self.ambiguity(self.newrules[nt]) + else: + rule = self.newrules[nt][0] + #print rule + + rhs = rule[1] + attr = [None] * len(rhs) + + for i in range(len(rhs)-1, -1, -1): + attr[i] = self.deriveEpsilon(rhs[i]) + return self.rule2func[self.new2old[rule]](attr) + + def buildTree(self, nt, item, tokens, k): + state, parent = item + + choices = [] + for rule in self.states[state].complete: + if rule[0] == nt: + choices.append(rule) + rule = choices[0] + if len(choices) > 1: + rule = self.ambiguity(choices) + #print rule + + rhs = rule[1] + attr = [None] * len(rhs) + + for i in range(len(rhs)-1, -1, -1): + sym = rhs[i] + if not self.newrules.has_key(sym): + if sym != self._BOF: + attr[i] = tokens[k-1] + key = (item, k) + item, k = self.predecessor(key, None) + #elif self.isnullable(sym): + elif self._NULLABLE == sym[0:len(self._NULLABLE)]: + attr[i] = self.deriveEpsilon(sym) + else: + key = (item, k) + why = self.causal(key) + attr[i] = self.buildTree(sym, why[0], + tokens, why[1]) + item, k = self.predecessor(key, why) + return self.rule2func[self.new2old[rule]](attr) + + def ambiguity(self, rules): + # + # XXX - problem here and in collectRules() if the same rule + # appears in >1 method. Also undefined results if rules + # causing the ambiguity appear in the same method. + # + sortlist = [] + name2index = {} + for i in range(len(rules)): + lhs, rhs = rule = rules[i] + name = self.rule2name[self.new2old[rule]] + sortlist.append((len(rhs), name)) + name2index[name] = i + sortlist.sort() + list = map(lambda (a,b): b, sortlist) + return rules[name2index[self.resolve(list)]] + + def resolve(self, list): + # + # Resolve ambiguity in favor of the shortest RHS. + # Since we walk the tree from the top down, this + # should effectively resolve in favor of a "shift". 
+ # + return list[0] + +# +# GenericASTBuilder automagically constructs a concrete/abstract syntax tree +# for a given input. The extra argument is a class (not an instance!) +# which supports the "__setslice__" and "__len__" methods. +# +# XXX - silently overrides any user code in methods. +# + +class GenericASTBuilder(GenericParser): + def __init__(self, AST, start): + GenericParser.__init__(self, start) + self.AST = AST + + def preprocess(self, rule, func): + rebind = lambda lhs, self=self: \ + lambda args, lhs=lhs, self=self: \ + self.buildASTNode(args, lhs) + lhs, rhs = rule + return rule, rebind(lhs) + + def buildASTNode(self, args, lhs): + children = [] + for arg in args: + if isinstance(arg, self.AST): + children.append(arg) + else: + children.append(self.terminal(arg)) + return self.nonterminal(lhs, children) + + def terminal(self, token): return token + + def nonterminal(self, type, args): + rv = self.AST(type) + rv[:len(args)] = args + return rv + +# +# GenericASTTraversal is a Visitor pattern according to Design Patterns. For +# each node it attempts to invoke the method n_<node type>, falling +# back onto the default() method if the n_* can't be found. The preorder +# traversal also looks for an exit hook named n_<node type>_exit (no default +# routine is called if it's not found). To prematurely halt traversal +# of a subtree, call the prune() method -- this only makes sense for a +# preorder traversal. Node type is determined via the typestring() method. 
+# + +class GenericASTTraversalPruningException: + pass + +class GenericASTTraversal: + def __init__(self, ast): + self.ast = ast + + def typestring(self, node): + return node.type + + def prune(self): + raise GenericASTTraversalPruningException + + def preorder(self, node=None): + if node is None: + node = self.ast + + try: + name = 'n_' + self.typestring(node) + if hasattr(self, name): + func = getattr(self, name) + func(node) + else: + self.default(node) + except GenericASTTraversalPruningException: + return + + for kid in node: + self.preorder(kid) + + name = name + '_exit' + if hasattr(self, name): + func = getattr(self, name) + func(node) + + def postorder(self, node=None): + if node is None: + node = self.ast + + for kid in node: + self.postorder(kid) + + name = 'n_' + self.typestring(node) + if hasattr(self, name): + func = getattr(self, name) + func(node) + else: + self.default(node) + + + def default(self, node): + pass + +# +# GenericASTMatcher. AST nodes must have "__getitem__" and "__cmp__" +# implemented. +# +# XXX - makes assumptions about how GenericParser walks the parse tree. 
+# + +class GenericASTMatcher(GenericParser): + def __init__(self, start, ast): + GenericParser.__init__(self, start) + self.ast = ast + + def preprocess(self, rule, func): + rebind = lambda func, self=self: \ + lambda args, func=func, self=self: \ + self.foundMatch(args, func) + lhs, rhs = rule + rhslist = list(rhs) + rhslist.reverse() + + return (lhs, tuple(rhslist)), rebind(func) + + def foundMatch(self, args, func): + func(args[-1]) + return args[-1] + + def match_r(self, node): + self.input.insert(0, node) + children = 0 + + for child in node: + if children == 0: + self.input.insert(0, '(') + children = children + 1 + self.match_r(child) + + if children > 0: + self.input.insert(0, ')') + + def match(self, ast=None): + if ast is None: + ast = self.ast + self.input = [] + + self.match_r(ast) + self.parse(self.input) + + def resolve(self, list): + # + # Resolve ambiguity in favor of the longest RHS. + # + return list[-1] + +def _dump(tokens, sets, states): + for i in range(len(sets)): + print 'set', i + for item in sets[i]: + print '\t', item + for (lhs, rhs), pos in states[item[0]].items: + print '\t\t', lhs, '::=', + print string.join(rhs[:pos]), + print '.', + print string.join(rhs[pos:]) + if i < len(tokens): + print + print 'token', str(tokens[i]) + print This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <zy...@us...> - 2008-01-02 07:22:19
|
Revision: 3953 http://jython.svn.sourceforge.net/jython/?rev=3953&view=rev Author: zyasoft Date: 2008-01-01 23:22:17 -0800 (Tue, 01 Jan 2008) Log Message: ----------- Added sandbox for jbaker (aka zyasoft), in particular to track changes necessary to run Django for the time being, until we can get around to putting them in a better place Added Paths: ----------- trunk/sandbox/jbaker/ trunk/sandbox/jbaker/django/ trunk/sandbox/jbaker/django/db/ trunk/sandbox/jbaker/django/db/backends/ trunk/sandbox/jbaker/django/db/backends/postgresql_zxjdbc/ trunk/sandbox/jbaker/django/db/backends/postgresql_zxjdbc/__init__.py trunk/sandbox/jbaker/django/db/backends/postgresql_zxjdbc/base.py trunk/sandbox/jbaker/django/db/backends/postgresql_zxjdbc/client.py trunk/sandbox/jbaker/django/db/backends/postgresql_zxjdbc/creation.py trunk/sandbox/jbaker/django/db/backends/postgresql_zxjdbc/introspection.py trunk/sandbox/jbaker/django/dispatch/ trunk/sandbox/jbaker/django/dispatch/robustapply.py trunk/sandbox/jbaker/django/test/ trunk/sandbox/jbaker/django/test/utils.py trunk/sandbox/jbaker/django/views/ trunk/sandbox/jbaker/django/views/debug.py Added: trunk/sandbox/jbaker/django/db/backends/postgresql_zxjdbc/__init__.py =================================================================== Added: trunk/sandbox/jbaker/django/db/backends/postgresql_zxjdbc/base.py =================================================================== --- trunk/sandbox/jbaker/django/db/backends/postgresql_zxjdbc/base.py (rev 0) +++ trunk/sandbox/jbaker/django/db/backends/postgresql_zxjdbc/base.py 2008-01-02 07:22:17 UTC (rev 3953) @@ -0,0 +1,170 @@ +""" +PostgreSQL database backend for Django. 
+ +Requires zxJDBC + PostgreSQL JDBC Driver +""" +import datetime +import sys + +from django.db.backends import BaseDatabaseWrapper, BaseDatabaseFeatures +from django.db.backends.postgresql.operations import DatabaseOperations +from django.utils.functional import Promise +from django.utils.encoding import force_unicode + +try: + from com.ziclix.python.sql import zxJDBC as Database + from com.ziclix.python.sql import FilterDataHandler, JDBC30DataHandler + from java.sql import Timestamp, Date, Time, Types + from java.util import Calendar, GregorianCalendar + from org.python.core import Py +except ImportError, e: + from django.core.exceptions import ImproperlyConfigured + raise ImproperlyConfigured("Error loading zxJDBC module: %s" % e) + + +DatabaseError = Database.DatabaseError +IntegrityError = Database.IntegrityError + +class DatabaseFeatures(BaseDatabaseFeatures): + needs_datetime_string_cast = False + +class DatabaseWrapper(BaseDatabaseWrapper): + features = DatabaseFeatures() + ops = DatabaseOperations() + operators = { + 'exact': '= %s', + 'iexact': 'ILIKE %s', + 'contains': 'LIKE %s', + 'icontains': 'ILIKE %s', + 'regex': '~ %s', + 'iregex': '~* %s', + 'gt': '> %s', + 'gte': '>= %s', + 'lt': '< %s', + 'lte': '<= %s', + 'startswith': 'LIKE %s', + 'endswith': 'LIKE %s', + 'istartswith': 'ILIKE %s', + 'iendswith': 'ILIKE %s', + } + + def _cursor(self, settings): + set_tz = False + if self.connection is None: + set_tz = True + if settings.DATABASE_NAME == '': + from django.core.exceptions import ImproperlyConfigured + raise ImproperlyConfigured("You need to specify DATABASE_NAME in your Django settings file.") + host = settings.DATABASE_HOST or 'localhost' + port = settings.DATABASE_PORT and (':' + settings.DATABASE_PORT) or '' + conn_string = "jdbc:postgresql://%s%s/%s" % (host, port, + settings.DATABASE_NAME) + self.connection = Database.connect(conn_string, + settings.DATABASE_USER, + settings.DATABASE_PASSWORD, + 'org.postgresql.Driver', + **self.options) + # 
make transactions transparent to all cursors + from java.sql import Connection + jdbc_conn = self.connection.__connection__ + jdbc_conn.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED) + real_cursor = self.connection.cursor() + # setup the JDBC3 DataHandler and our data handler + real_cursor.datahandler = JDBC30DataHandler(DjangoDataHandler(real_cursor.datahandler)) + cursor = CursorWrapper(real_cursor) + if set_tz: + # XXX: settings.TIME_ZONE could inject SQL + # Does it matter? If someone can edit settings.py, it already + # have the user and password! + cursor.execute("SET TIME ZONE %s" % + self.ops.quote_name(settings.TIME_ZONE)) + + return cursor + +class DjangoDataHandler(FilterDataHandler): + def getPyObject(self, set, col, datatype): + "Convert Java types into Python ones" + if datatype in (Types.VARCHAR, Types.CHAR): + return Py.newUnicode(set.getString(col)) + elif datatype == Types.TIMESTAMP: + # Convert java.sql.TimeStamp into datetime + cal = GregorianCalendar() + cal.time = set.getTimestamp(col) + return datetime.datetime(cal.get(Calendar.YEAR), + cal.get(Calendar.MONTH) + 1, + cal.get(Calendar.DAY_OF_MONTH), + cal.get(Calendar.HOUR_OF_DAY), + cal.get(Calendar.MINUTE), + cal.get(Calendar.SECOND), + cal.get(Calendar.MILLISECOND) * 1000) + elif datatype == Types.TIME: + # Convert java.sql.Time into time + cal = GregorianCalendar() + cal.time = set.getTime(col) + return datetime.time(cal.get(Calendar.HOUR_OF_DAY), + cal.get(Calendar.MINUTE), + cal.get(Calendar.SECOND), + cal.get(Calendar.MILLISECOND) * 1000) + elif datatype == Types.DATE: + # Convert java.sql.Date into datetime + cal = GregorianCalendar() + cal.time = set.getDate(col) + return datetime.date(cal.get(Calendar.YEAR), + cal.get(Calendar.MONTH) + 1, + cal.get(Calendar.DAY_OF_MONTH)) + else: + return FilterDataHandler.getPyObject(self, set, col, datatype) + + def _hour_minute_second_micro(self, s): + hour, minute, second_and_microsecond = s.split(':') + if '.' 
in second_and_microsecond: + second, microsecond = second_and_microsecond.split('.') + else: + second, microsecond = second_and_microsecond, 0 + return int(hour), int(minute), int(second), int(microsecond) + + def _year_month_day(self, s): + return [int(x) for x in s.split('-')] + + def setJDBCObject(self, stmt, index, obj, datatype=None) : + "Convert Django-models types into Java ones" + if datatype is None: + FilterDataHandler.setJDBCObject(self, stmt, index, obj) + return + if datatype == Types.TIMESTAMP and isinstance(obj, basestring): + # Convert string into java.sql.Timestamp + # The string is generated by Django using datetime.__str__ , + # so the format is year-month-day hour:minute:second.microsecond + d, t = obj.split(' ') + hour, minute, second, microsecond = self._hour_minute_second_micro(t) + year, month, day = self._year_month_day(d) + # FIXME: This ignores microseconds + obj = Database.Timestamp(year, month, day, hour, minute, second) # Database is an alias for zxJDBC + elif datatype == Types.TIME and isinstance(obj, basestring): + # Convert string into java.sql.Time + hour, minute, second, microsecond = self._hour_minute_second_micro(obj) + # FIXME: This ignores microseconds + obj = Database.Time(int(hour), int(minute), int(second)) + elif datatype == Types.DATE and isinstance(obj, basestring): + year, month, day = self._year_month_day(obj) + obj = Database.Date(year, month, day) + FilterDataHandler.setJDBCObject(self, stmt, index, obj, datatype) + + +class CursorWrapper(object): + """ + A simple wrapper to do the "%s" -> "?" 
replacement before running zxJDBC's + execute or executemany + """ + def __init__(self, cursor): + self.cursor = cursor + def execute(self, sql, params=()): + self.cursor.execute(sql.replace('%s', '?'), params) + def executemany(self, sql, param_list): + self.cursor.executemany(sql.replace('%s', '?'), param_list) + def __getattr__(self, attr): + if attr in self.__dict__: + return self.__dict__[attr] + else: + return getattr(self.cursor, attr) + Added: trunk/sandbox/jbaker/django/db/backends/postgresql_zxjdbc/client.py =================================================================== --- trunk/sandbox/jbaker/django/db/backends/postgresql_zxjdbc/client.py (rev 0) +++ trunk/sandbox/jbaker/django/db/backends/postgresql_zxjdbc/client.py 2008-01-02 07:22:17 UTC (rev 3953) @@ -0,0 +1 @@ +from django.db.backends.postgresql.client import * Added: trunk/sandbox/jbaker/django/db/backends/postgresql_zxjdbc/creation.py =================================================================== --- trunk/sandbox/jbaker/django/db/backends/postgresql_zxjdbc/creation.py (rev 0) +++ trunk/sandbox/jbaker/django/db/backends/postgresql_zxjdbc/creation.py 2008-01-02 07:22:17 UTC (rev 3953) @@ -0,0 +1 @@ +from django.db.backends.postgresql.creation import * Added: trunk/sandbox/jbaker/django/db/backends/postgresql_zxjdbc/introspection.py =================================================================== --- trunk/sandbox/jbaker/django/db/backends/postgresql_zxjdbc/introspection.py (rev 0) +++ trunk/sandbox/jbaker/django/db/backends/postgresql_zxjdbc/introspection.py 2008-01-02 07:22:17 UTC (rev 3953) @@ -0,0 +1,90 @@ +# XXX: Almost a exact copy & paste from postgresql_psycopg2/operations.py +# +# One can't just import that module because it imports +# postgresql_psycopg2.base. 
Once #5461 is committed
+ relations[row[0][0] - 1] = (row[1][0] - 1, row[2]) + return relations + +def get_indexes(cursor, table_name): + """ + Returns a dictionary of fieldname -> infodict for the given table, + where each infodict is in the format: + {'primary_key': boolean representing whether it's the primary key, + 'unique': boolean representing whether it's a unique index} + """ + # This query retrieves each index on the given table, including the + # first associated field name + cursor.execute(""" + SELECT attr.attname, idx.indkey, idx.indisunique, idx.indisprimary + FROM pg_catalog.pg_class c, pg_catalog.pg_class c2, + pg_catalog.pg_index idx, pg_catalog.pg_attribute attr + WHERE c.oid = idx.indrelid + AND idx.indexrelid = c2.oid + AND attr.attrelid = c.oid + AND attr.attnum = idx.indkey[0] + AND c.relname = %s""", [table_name]) + indexes = {} + for row in cursor.fetchall(): + # row[1] (idx.indkey) is stored in the DB as an array. It comes out as + # a string of space-separated integers. This designates the field + # indexes (1-based) of the fields that have indexes on the table. + # Here, we skip any indexes across multiple fields. + if ' ' in row[1]: + continue + indexes[row[0]] = {'primary_key': row[3], 'unique': row[2]} + return indexes + +# Maps type codes to Django Field types. 
+DATA_TYPES_REVERSE = { + 16: 'BooleanField', + 21: 'SmallIntegerField', + 23: 'IntegerField', + 25: 'TextField', + 701: 'FloatField', + 869: 'IPAddressField', + 1043: 'CharField', + 1082: 'DateField', + 1083: 'TimeField', + 1114: 'DateTimeField', + 1184: 'DateTimeField', + 1266: 'TimeField', + 1700: 'DecimalField', +} Added: trunk/sandbox/jbaker/django/dispatch/robustapply.py =================================================================== --- trunk/sandbox/jbaker/django/dispatch/robustapply.py (rev 0) +++ trunk/sandbox/jbaker/django/dispatch/robustapply.py 2008-01-02 07:22:17 UTC (rev 3953) @@ -0,0 +1,50 @@ +"""Robust apply mechanism + +Provides a function "call", which can sort out +what arguments a given callable object can take, +and subset the given arguments to match only +those which are acceptable. +""" + +def function( receiver ): + """Get function-like callable object for given receiver + + returns (function_or_method, codeObject, fromMethod) + + If fromMethod is true, then the callable already + has its first argument bound + """ + if hasattr(receiver, '__call__'): + # receiver is a class instance; assume it is callable. + # Reassign receiver to the actual method that will be called. + if hasattr( receiver.__call__, 'im_func') or hasattr( receiver.__call__, 'im_code'): + # jython's reflected functions are also class instances + # but they don't have func_code + if type(receiver.__call__.im_func).__name__ != 'reflectedfunction': + receiver = receiver.__call__ + if hasattr( receiver, 'im_func' ): + # an instance-method... 
+ return receiver, receiver.im_func.func_code, 1 + elif not hasattr( receiver, 'func_code'): + raise ValueError('unknown reciever type %s %s'%(receiver, type(receiver))) + return receiver, receiver.func_code, 0 + +def robustApply(receiver, *arguments, **named): + """Call receiver with arguments and an appropriate subset of named + """ + receiver, codeObject, startIndex = function( receiver ) + acceptable = codeObject.co_varnames[startIndex+len(arguments):codeObject.co_argcount] + for name in codeObject.co_varnames[startIndex:startIndex+len(arguments)]: + if named.has_key( name ): + raise TypeError( + """Argument %r specified both positionally and as a keyword for calling %r"""% ( + name, receiver, + ) + ) + if not (codeObject.co_flags & 8): + # fc does not have a **kwds type parameter, therefore + # remove unacceptable arguments. + for arg in named.keys(): + if arg not in acceptable: + del named[arg] + return receiver(*arguments, **named) Added: trunk/sandbox/jbaker/django/test/utils.py =================================================================== --- trunk/sandbox/jbaker/django/test/utils.py (rev 0) +++ trunk/sandbox/jbaker/django/test/utils.py 2008-01-02 07:22:17 UTC (rev 3953) @@ -0,0 +1,190 @@ +import sys, time +from django.conf import settings +from django.db import connection, get_creation_module +from django.core import mail +from django.core.management import call_command +from django.dispatch import dispatcher +from django.test import signals +from django.template import Template + +# The prefix to put on the default database name when creating +# the test database. 
+TEST_DATABASE_PREFIX = 'test_' + +def instrumented_test_render(self, context): + """ + An instrumented Template render method, providing a signal + that can be intercepted by the test system Client + """ + dispatcher.send(signal=signals.template_rendered, sender=self, template=self, context=context) + return self.nodelist.render(context) + +class TestSMTPConnection(object): + """A substitute SMTP connection for use during test sessions. + The test connection stores email messages in a dummy outbox, + rather than sending them out on the wire. + + """ + def __init__(*args, **kwargs): + pass + def open(self): + "Mock the SMTPConnection open() interface" + pass + def close(self): + "Mock the SMTPConnection close() interface" + pass + def send_messages(self, messages): + "Redirect messages to the dummy outbox" + mail.outbox.extend(messages) + return len(messages) + +def setup_test_environment(): + """Perform any global pre-test setup. This involves: + + - Installing the instrumented test renderer + - Diverting the email sending functions to a test buffer + + """ + Template.original_render = Template.render + Template.render = instrumented_test_render + + mail.original_SMTPConnection = mail.SMTPConnection + mail.SMTPConnection = TestSMTPConnection + + mail.outbox = [] + +def teardown_test_environment(): + """Perform any global post-test teardown. This involves: + + - Restoring the original test renderer + - Restoring the email sending functions + + """ + Template.render = Template.original_render + del Template.original_render + + mail.SMTPConnection = mail.original_SMTPConnection + del mail.original_SMTPConnection + + del mail.outbox + +def _set_autocommit(connection): + "Make sure a connection is in autocommit mode." + # XXX Shouldn't it be part of backend interface? 
+ if hasattr(connection.connection, "autocommit"): + if callable(connection.connection.autocommit): + connection.connection.autocommit(True) + else: + connection.connection.autocommit = True + elif hasattr(connection.connection, "set_isolation_level"): + connection.connection.set_isolation_level(0) + +def get_mysql_create_suffix(): + suffix = [] + if settings.TEST_DATABASE_CHARSET: + suffix.append('CHARACTER SET %s' % settings.TEST_DATABASE_CHARSET) + if settings.TEST_DATABASE_COLLATION: + suffix.append('COLLATE %s' % settings.TEST_DATABASE_COLLATION) + return ' '.join(suffix) + +def get_postgresql_create_suffix(): + assert settings.TEST_DATABASE_COLLATION is None, "PostgreSQL does not support collation setting at database creation time." + if settings.TEST_DATABASE_CHARSET: + return "WITH ENCODING '%s'" % settings.TEST_DATABASE_CHARSET + return '' + +def create_test_db(verbosity=1, autoclobber=False): + """ + Creates a test database, prompting the user for confirmation if the + database already exists. Returns the name of the test database created. + """ + # If the database backend wants to create the test DB itself, let it + creation_module = get_creation_module() + if hasattr(creation_module, "create_test_db"): + creation_module.create_test_db(settings, connection, verbosity, autoclobber) + return + + if verbosity >= 1: + print "Creating test database..." + # If we're using SQLite, it's more convenient to test against an + # in-memory database. 
+ if settings.DATABASE_ENGINE == "sqlite3": + TEST_DATABASE_NAME = ":memory:" + else: + suffix = { + 'postgresql': get_postgresql_create_suffix, + 'postgresql_psycopg2': get_postgresql_create_suffix, + 'mysql': get_mysql_create_suffix, + 'mysql_old': get_mysql_create_suffix, + }.get(settings.DATABASE_ENGINE, lambda: '')() + if settings.TEST_DATABASE_NAME: + TEST_DATABASE_NAME = settings.TEST_DATABASE_NAME + else: + TEST_DATABASE_NAME = TEST_DATABASE_PREFIX + settings.DATABASE_NAME + + qn = connection.ops.quote_name + + # Create the test database and connect to it. We need to autocommit + # if the database supports it because PostgreSQL doesn't allow + # CREATE/DROP DATABASE statements within transactions. + cursor = connection.cursor() + _set_autocommit(connection) + try: + cursor.execute("CREATE DATABASE %s %s" % (qn(TEST_DATABASE_NAME), suffix)) + except Exception, e: + sys.stderr.write("Got an error creating the test database: %s\n" % e) + if not autoclobber: + confirm = raw_input("Type 'yes' if you would like to try deleting the test database '%s', or 'no' to cancel: " % TEST_DATABASE_NAME) + if autoclobber or confirm == 'yes': + try: + if verbosity >= 1: + print "Destroying old test database..." + cursor.execute("DROP DATABASE %s" % qn(TEST_DATABASE_NAME)) + if verbosity >= 1: + print "Creating test database..." + cursor.execute("CREATE DATABASE %s %s" % (qn(TEST_DATABASE_NAME), suffix)) + except Exception, e: + sys.stderr.write("Got an error recreating the test database: %s\n" % e) + sys.exit(2) + else: + print "Tests cancelled." + sys.exit(1) + + connection.close() + settings.DATABASE_NAME = TEST_DATABASE_NAME + + call_command('syncdb', verbosity=verbosity, interactive=False) + + if settings.CACHE_BACKEND.startswith('db://'): + cache_name = settings.CACHE_BACKEND[len('db://'):] + call_command('createcachetable', cache_name) + + # Get a cursor (even though we don't need one yet). This has + # the side effect of initializing the test database. 
+ cursor = connection.cursor() + + return TEST_DATABASE_NAME + +def destroy_test_db(old_database_name, verbosity=1): + # If the database wants to drop the test DB itself, let it + creation_module = get_creation_module() + if hasattr(creation_module, "destroy_test_db"): + creation_module.destroy_test_db(settings, connection, old_database_name, verbosity) + return + + # Unless we're using SQLite, remove the test database to clean up after + # ourselves. Connect to the previous database (not the test database) + # to do so, because it's not allowed to delete a database while being + # connected to it. + if verbosity >= 1: + print "Destroying test database..." + connection.close() + TEST_DATABASE_NAME = settings.DATABASE_NAME + settings.DATABASE_NAME = old_database_name + + if settings.DATABASE_ENGINE != "sqlite3": + cursor = connection.cursor() + _set_autocommit(connection) + time.sleep(1) # To avoid "database is being accessed by other users" errors. + cursor.execute("DROP DATABASE %s" % connection.ops.quote_name(TEST_DATABASE_NAME)) + connection.close() Added: trunk/sandbox/jbaker/django/views/debug.py =================================================================== --- trunk/sandbox/jbaker/django/views/debug.py (rev 0) +++ trunk/sandbox/jbaker/django/views/debug.py 2008-01-02 07:22:17 UTC (rev 3953) @@ -0,0 +1,756 @@ +import os +import re +import sys +import logging + +from django.conf import settings +from django.template import Template, Context, TemplateDoesNotExist +from django.utils.html import escape +from django.http import HttpResponseServerError, HttpResponseNotFound +from django.utils.encoding import smart_unicode + +HIDDEN_SETTINGS = re.compile('SECRET|PASSWORD|PROFANITIES_LIST') + +def linebreak_iter(template_source): + yield 0 + p = template_source.find('\n') + while p >= 0: + yield p+1 + p = template_source.find('\n', p+1) + yield len(template_source) + 1 + +def get_template_exception_info(exc_type, exc_value, tb): + origin, (start, end) = 
exc_value.source + template_source = origin.reload() + context_lines = 10 + line = 0 + upto = 0 + source_lines = [] + before = during = after = "" + for num, next in enumerate(linebreak_iter(template_source)): + if start >= upto and end <= next: + line = num + before = escape(template_source[upto:start]) + during = escape(template_source[start:end]) + after = escape(template_source[end:next]) + source_lines.append( (num, escape(template_source[upto:next])) ) + upto = next + total = len(source_lines) + + top = max(1, line - context_lines) + bottom = min(total, line + 1 + context_lines) + + template_info = { + 'message': exc_value.args[0], + 'source_lines': source_lines[top:bottom], + 'before': before, + 'during': during, + 'after': after, + 'top': top, + 'bottom': bottom, + 'total': total, + 'line': line, + 'name': origin.name, + } + exc_info = hasattr(exc_value, 'exc_info') and exc_value.exc_info or (exc_type, exc_value, tb) + return exc_info + (template_info,) + +def get_safe_settings(): + "Returns a dictionary of the settings module, with sensitive settings blurred out." + settings_dict = {} + for k in dir(settings): + if k.isupper(): + if HIDDEN_SETTINGS.search(k): + settings_dict[k] = '********************' + else: + settings_dict[k] = getattr(settings, k) + return settings_dict + +def technical_500_response(request, exc_type, exc_value, tb): + """ + Create a technical server error response. The last three arguments are + the values returned from sys.exc_info() and friends. 
+ """ + template_info = None + template_does_not_exist = False + loader_debug_info = None + if issubclass(exc_type, TemplateDoesNotExist): + from django.template.loader import template_source_loaders + template_does_not_exist = True + loader_debug_info = [] + for loader in template_source_loaders: + try: + source_list_func = getattr(__import__(loader.__module__, {}, {}, ['get_template_sources']), 'get_template_sources') + # NOTE: This assumes exc_value is the name of the template that + # the loader attempted to load. + template_list = [{'name': t, 'exists': os.path.exists(t)} \ + for t in source_list_func(str(exc_value))] + except (ImportError, AttributeError): + template_list = [] + loader_debug_info.append({ + 'loader': loader.__module__ + '.' + loader.__name__, + 'templates': template_list, + }) + if settings.TEMPLATE_DEBUG and hasattr(exc_value, 'source'): + exc_type, exc_value, tb, template_info = get_template_exception_info(exc_type, exc_value, tb) + frames = [] + while tb is not None: + # support for __traceback_hide__ which is used by a few libraries + # to hide internal frames. 
+ if tb.tb_frame.f_locals.get('__traceback_hide__'): + tb = tb.tb_next + continue + filename = tb.tb_frame.f_code.co_filename + function = tb.tb_frame.f_code.co_name + lineno = tb.tb_lineno - 1 + loader = tb.tb_frame.f_globals.get('__loader__') + module_name = tb.tb_frame.f_globals.get('__name__') + pre_context_lineno, pre_context, context_line, post_context = _get_lines_from_file(filename, lineno, 7, loader, module_name) + if pre_context_lineno is not None: + frames.append({ + 'tb': tb, + 'filename': filename, + 'function': function, + 'lineno': lineno + 1, + 'vars': tb.tb_frame.f_locals.items(), + 'id': id(tb), + 'pre_context': pre_context, + 'context_line': context_line, + 'post_context': post_context, + 'pre_context_lineno': pre_context_lineno + 1, + }) + tb = tb.tb_next + + if not frames: + frames = [{ + 'filename': '<unknown>', + 'function': '?', + 'lineno': '?', + }] + + unicode_hint = '' + if issubclass(exc_type, UnicodeError): + start = getattr(exc_value, 'start', None) + end = getattr(exc_value, 'end', None) + if start is not None and end is not None: + unicode_str = exc_value.args[1] + unicode_hint = smart_unicode(unicode_str[max(start-5, 0):min(end+5, len(unicode_str))], 'ascii', errors='replace') + from django import get_version + t = Template(TECHNICAL_500_TEMPLATE, name='Technical 500 template') + c = Context({ + 'exception_type': exc_type.__name__, + 'exception_value': smart_unicode(exc_value, errors='replace'), + 'unicode_hint': unicode_hint, + 'frames': frames, + 'lastframe': frames[-1], + 'request': request, + 'request_protocol': request.is_secure() and "https" or "http", + 'settings': get_safe_settings(), + 'sys_executable': sys.executable, + 'sys_version_info': '%d.%d.%d' % sys.version_info[0:3], + 'django_version_info': get_version(), + 'sys_path' : sys.path, + 'template_info': template_info, + 'template_does_not_exist': template_does_not_exist, + 'loader_debug_info': loader_debug_info, + }) + return HttpResponseServerError(t.render(c), 
mimetype='text/html') + +def technical_404_response(request, exception): + "Create a technical 404 error response. The exception should be the Http404." + try: + tried = exception.args[0]['tried'] + except (IndexError, TypeError): + tried = [] + else: + if not tried: + # tried exists but is an empty list. The URLconf must've been empty. + return empty_urlconf(request) + + t = Template(TECHNICAL_404_TEMPLATE, name='Technical 404 template') + c = Context({ + 'root_urlconf': settings.ROOT_URLCONF, + 'request_path': request.path[1:], # Trim leading slash + 'urlpatterns': tried, + 'reason': str(exception), + 'request': request, + 'request_protocol': request.is_secure() and "https" or "http", + 'settings': get_safe_settings(), + }) + return HttpResponseNotFound(t.render(c), mimetype='text/html') + +def empty_urlconf(request): + "Create an empty URLconf 404 error response." + t = Template(EMPTY_URLCONF_TEMPLATE, name='Empty URLConf template') + c = Context({ + 'project_name': settings.SETTINGS_MODULE.split('.')[0] + }) + return HttpResponseNotFound(t.render(c), mimetype='text/html') + +def _get_lines_from_file(filename, lineno, context_lines, loader=None, module_name=None): + """ + Returns context_lines before and after lineno from file. + Returns (pre_context_lineno, pre_context, context_line, post_context). + """ + source = None + if loader is not None: + source = loader.get_source(module_name).splitlines() + else: + try: + f = open(filename) + try: + source = f.readlines() + finally: + f.close() + except (OSError, IOError): + pass + if source is None: + return None, [], None, [] + + encoding = 'ascii' + for line in source[:2]: + # File coding may be specified. 
Match pattern from PEP-263 + # (http://www.python.org/dev/peps/pep-0263/) + match = re.search(r'coding[:=]\s*([-\w.]+)', line) + if match: + encoding = match.group(1) + break + source = [unicode(sline, encoding, 'replace') for sline in source] + + lower_bound = max(0, lineno - context_lines) + upper_bound = lineno + context_lines + + pre_context = [line.strip('\n') for line in source[lower_bound:lineno]] + try: + context_line = source[lineno].strip('\n') + except Exception, err: + logging.warn((filename, module_name, lineno, source)) + raise + post_context = [line.strip('\n') for line in source[lineno+1:upper_bound]] + + return lower_bound, pre_context, context_line, post_context + +# +# Templates are embedded in the file so that we know the error handler will +# always work even if the template loader is broken. +# + +TECHNICAL_500_TEMPLATE = """ +<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> +<html lang="en"> +<head> + <meta http-equiv="content-type" content="text/html; charset=utf-8"> + <meta name="robots" content="NONE,NOARCHIVE"> + <title>{{ exception_type }} at {{ request.path|escape }}</title> + <style type="text/css"> + html * { padding:0; margin:0; } + body * { padding:10px 20px; } + body * * { padding:0; } + body { font:small sans-serif; } + body>div { border-bottom:1px solid #ddd; } + h1 { font-weight:normal; } + h2 { margin-bottom:.8em; } + h2 span { font-size:80%; color:#666; font-weight:normal; } + h3 { margin:1em 0 .5em 0; } + h4 { margin:0 0 .5em 0; font-weight: normal; } + table { border:1px solid #ccc; border-collapse: collapse; width:100%; background:white; } + tbody td, tbody th { vertical-align:top; padding:2px 3px; } + thead th { padding:1px 6px 1px 3px; background:#fefefe; text-align:left; font-weight:normal; font-size:11px; border:1px solid #ddd; } + tbody th { width:12em; text-align:right; color:#666; padding-right:.5em; } + table.vars { margin:5px 0 2px 40px; } + table.vars td, 
table.req td { font-family:monospace; } + table td.code { width:100%; } + table td.code div { overflow:hidden; } + table.source th { color:#666; } + table.source td { font-family:monospace; white-space:pre; border-bottom:1px solid #eee; } + ul.traceback { list-style-type:none; } + ul.traceback li.frame { margin-bottom:1em; } + div.context { margin: 10px 0; } + div.context ol { padding-left:30px; margin:0 10px; list-style-position: inside; } + div.context ol li { font-family:monospace; white-space:pre; color:#666; cursor:pointer; } + div.context ol.context-line li { color:black; background-color:#ccc; } + div.context ol.context-line li span { float: right; } + div.commands { margin-left: 40px; } + div.commands a { color:black; text-decoration:none; } + #summary { background: #ffc; } + #summary h2 { font-weight: normal; color: #666; } + #explanation { background:#eee; } + #template, #template-not-exist { background:#f6f6f6; } + #template-not-exist ul { margin: 0 0 0 20px; } + #unicode-hint { background:#eee; } + #traceback { background:#eee; } + #requestinfo { background:#f6f6f6; padding-left:120px; } + #summary table { border:none; background:transparent; } + #requestinfo h2, #requestinfo h3 { position:relative; margin-left:-100px; } + #requestinfo h3 { margin-bottom:-1em; } + .error { background: #ffc; } + .specific { color:#cc3300; font-weight:bold; } + h2 span.commands { font-size:.7em;} + span.commands a:link {color:#5E5694;} + </style> + <script type="text/javascript"> + //<!-- + function getElementsByClassName(oElm, strTagName, strClassName){ + // Written by Jonathan Snook, http://www.snook.ca/jon; Add-ons by Robert Nyman, http://www.robertnyman.com + var arrElements = (strTagName == "*" && document.all)? 
document.all : + oElm.getElementsByTagName(strTagName); + var arrReturnElements = new Array(); + strClassName = strClassName.replace(/\-/g, "\\-"); + var oRegExp = new RegExp("(^|\\s)" + strClassName + "(\\s|$)"); + var oElement; + for(var i=0; i<arrElements.length; i++){ + oElement = arrElements[i]; + if(oRegExp.test(oElement.className)){ + arrReturnElements.push(oElement); + } + } + return (arrReturnElements) + } + function hideAll(elems) { + for (var e = 0; e < elems.length; e++) { + elems[e].style.display = 'none'; + } + } + window.onload = function() { + hideAll(getElementsByClassName(document, 'table', 'vars')); + hideAll(getElementsByClassName(document, 'ol', 'pre-context')); + hideAll(getElementsByClassName(document, 'ol', 'post-context')); + hideAll(getElementsByClassName(document, 'div', 'pastebin')); + } + function toggle() { + for (var i = 0; i < arguments.length; i++) { + var e = document.getElementById(arguments[i]); + if (e) { + e.style.display = e.style.display == 'none' ? 'block' : 'none'; + } + } + return false; + } + function varToggle(link, id) { + toggle('v' + id); + var s = link.getElementsByTagName('span')[0]; + var uarr = String.fromCharCode(0x25b6); + var darr = String.fromCharCode(0x25bc); + s.innerHTML = s.innerHTML == uarr ? darr : uarr; + return false; + } + function switchPastebinFriendly(link) { + s1 = "Switch to copy-and-paste view"; + s2 = "Switch back to interactive view"; + link.innerHTML = link.innerHTML == s1 ? 
s2 : s1; + toggle('browserTraceback', 'pastebinTraceback'); + return false; + } + //--> + </script> +</head> +<body> +<div id="summary"> + <h1>{{ exception_type }} at {{ request.path|escape }}</h1> + <h2>{{ exception_value|escape }}</h2> + <table class="meta"> + <tr> + <th>Request Method:</th> + <td>{{ request.META.REQUEST_METHOD }}</td> + </tr> + <tr> + <th>Request URL:</th> + <td>{{ request_protocol }}://{{ request.META.HTTP_HOST }}{{ request.path|escape }}</td> + </tr> + <tr> + <th>Exception Type:</th> + <td>{{ exception_type }}</td> + </tr> + <tr> + <th>Exception Value:</th> + <td>{{ exception_value|escape }}</td> + </tr> + <tr> + <th>Exception Location:</th> + <td>{{ lastframe.filename|escape }} in {{ lastframe.function|escape }}, line {{ lastframe.lineno }}</td> + </tr> + <tr> + <th>Python Executable:</th> + <td>{{ sys_executable|escape }}</td> + </tr> + <tr> + <th>Python Version:</th> + <td>{{ sys_version_info }}</td> + </tr> + <tr> + <th>Python Path:</th> + <td>{{ sys_path }}</td> + </tr> + </table> +</div> +{% if unicode_hint %} +<div id="unicode-hint"> + <h2>Unicode error hint</h2> + <p>The string that could not be encoded/decoded was: <strong>{{ unicode_hint|escape }}</strong></p> +</div> +{% endif %} +{% if template_does_not_exist %} +<div id="template-not-exist"> + <h2>Template-loader postmortem</h2> + {% if loader_debug_info %} + <p>Django tried loading these templates, in this order:</p> + <ul> + {% for loader in loader_debug_info %} + <li>Using loader <code>{{ loader.loader }}</code>: + <ul>{% for t in loader.templates %}<li><code>{{ t.name }}</code> (File {% if t.exists %}exists{% else %}does not exist{% endif %})</li>{% endfor %}</ul> + </li> + {% endfor %} + </ul> + {% else %} + <p>Django couldn't find any templates because your <code>TEMPLATE_LOADERS</code> setting is empty!</p> + {% endif %} +</div> +{% endif %} +{% if template_info %} +<div id="template"> + <h2>Template error</h2> + <p>In template <code>{{ template_info.name }}</code>, error 
at line <strong>{{ template_info.line }}</strong></p> + <h3>{{ template_info.message }}</h3> + <table class="source{% if template_info.top %} cut-top{% endif %}{% ifnotequal template_info.bottom template_info.total %} cut-bottom{% endifnotequal %}"> + {% for source_line in template_info.source_lines %} + {% ifequal source_line.0 template_info.line %} + <tr class="error"><th>{{ source_line.0 }}</th> + <td>{{ template_info.before }}<span class="specific">{{ template_info.during }}</span>{{ template_info.after }}</td></tr> + {% else %} + <tr><th>{{ source_line.0 }}</th> + <td>{{ source_line.1 }}</td></tr> + {% endifequal %} + {% endfor %} + </table> +</div> +{% endif %} +<div id="traceback"> + <h2>Traceback <span class="commands"><a href="#" onclick="return switchPastebinFriendly(this);">Switch to copy-and-paste view</a></span></h2> + {% autoescape off %} + <div id="browserTraceback"> + <ul class="traceback"> + {% for frame in frames %} + <li class="frame"> + <code>{{ frame.filename|escape }}</code> in <code>{{ frame.function|escape }}</code> + + {% if frame.context_line %} + <div class="context" id="c{{ frame.id }}"> + {% if frame.pre_context %} + <ol start="{{ frame.pre_context_lineno }}" class="pre-context" id="pre{{ frame.id }}">{% for line in frame.pre_context %}<li onclick="toggle('pre{{ frame.id }}', 'post{{ frame.id }}')">{{ line|escape }}</li>{% endfor %}</ol> + {% endif %} + <ol start="{{ frame.lineno }}" class="context-line"><li onclick="toggle('pre{{ frame.id }}', 'post{{ frame.id }}')">{{ frame.context_line|escape }} <span>...</span></li></ol> + {% if frame.post_context %} + <ol start='{{ frame.lineno|add:"1" }}' class="post-context" id="post{{ frame.id }}">{% for line in frame.post_context %}<li onclick="toggle('pre{{ frame.id }}', 'post{{ frame.id }}')">{{ line|escape }}</li>{% endfor %}</ol> + {% endif %} + </div> + {% endif %} + + {% if frame.vars %} + <div class="commands"> + <a href="#" onclick="return varToggle(this, '{{ frame.id 
}}')"><span>▶</span> Local vars</a> + </div> + <table class="vars" id="v{{ frame.id }}"> + <thead> + <tr> + <th>Variable</th> + <th>Value</th> + </tr> + </thead> + <tbody> + {% for var in frame.vars|dictsort:"0" %} + <tr> + <td>{{ var.0|escape }}</td> + <td class="code"><div>{{ var.1|pprint|escape }}</div></td> + </tr> + {% endfor %} + </tbody> + </table> + {% endif %} + </li> + {% endfor %} + </ul> + </div> + {% endautoescape %} + <form action="http://dpaste.com/" name="pasteform" id="pasteform" method="post"> + <div id="pastebinTraceback" class="pastebin"> + <input type="hidden" name="language" value="PythonConsole"> + <input type="hidden" name="title" value="{{ exception_type|escape }} at {{ request.path|escape }}"> + <input type="hidden" name="source" value="Django Dpaste Agent"> + <input type="hidden" name="poster" value="Django"> + <textarea name="content" id="traceback_area" cols="140" rows="25"> +Environment: + +Request Method: {{ request.META.REQUEST_METHOD }} +Request URL: {{ request_protocol }}://{{ request.META.HTTP_HOST }}{{ request.path|escape }} +Django Version: {{ django_version_info }} +Python Version: {{ sys_version_info }} +Installed Applications: +{{ settings.INSTALLED_APPS|pprint }} +Installed Middleware: +{{ settings.MIDDLEWARE_CLASSES|pprint }} + +{% if template_does_not_exist %}Template Loader Error: +{% if loader_debug_info %}Django tried loading these templates, in this order: +{% for loader in loader_debug_info %}Using loader {{ loader.loader }}: +{% for t in loader.templates %}{{ t.name }} (File {% if t.exists %}exists{% else %}does not exist{% endif %}) +{% endfor %}{% endfor %} +{% else %}Django couldn't find any templates because your TEMPLATE_LOADERS setting is empty! 
+{% endif %} +{% endif %}{% if template_info %} +Template error: +In template {{ template_info.name }}, error at line {{ template_info.line }} + {{ template_info.message }}{% for source_line in template_info.source_lines %}{% ifequal source_line.0 template_info.line %} + {{ source_line.0 }} : {{ template_info.before }} {{ template_info.during }} {{ template_info.after }} +{% else %} + {{ source_line.0 }} : {{ source_line.1 }} +{% endifequal %}{% endfor %}{% endif %} +Traceback: +{% for frame in frames %}File "{{ frame.filename|escape }}" in {{ frame.function|escape }} +{% if frame.context_line %} {{ frame.lineno }}. {{ frame.context_line|escape }}{% endif %} +{% endfor %} +Exception Type: {{ exception_type|escape }} at {{ request.path|escape }} +Exception Value: {{ exception_value|escape }} +</textarea> + <br><br> + <input type="submit" value="Share this traceback on a public Web site"> + </div> +</form> +</div> + +<div id="requestinfo"> + <h2>Request information</h2> + + <h3 id="get-info">GET</h3> + {% if request.GET %} + <table class="req"> + <thead> + <tr> + <th>Variable</th> + <th>Value</th> + </tr> + </thead> + <tbody> + {% for var in request.GET.items %} + <tr> + <td>{{ var.0 }}</td> + <td class="code"><div>{{ var.1|pprint }}</div></td> + </tr> + {% endfor %} + </tbody> + </table> + {% else %} + <p>No GET data</p> + {% endif %} + + <h3 id="post-info">POST</h3> + {% if request.POST %} + <table class="req"> + <thead> + <tr> + <th>Variable</th> + <th>Value</th> + </tr> + </thead> + <tbody> + {% for var in request.POST.items %} + <tr> + <td>{{ var.0 }}</td> + <td class="code"><div>{{ var.1|pprint }}</div></td> + </tr> + {% endfor %} + </tbody> + </table> + {% else %} + <p>No POST data</p> + {% endif %} + + <h3 id="cookie-info">COOKIES</h3> + {% if request.COOKIES %} + <table class="req"> + <thead> + <tr> + <th>Variable</th> + <th>Value</th> + </tr> + </thead> + <tbody> + {% for var in request.COOKIES.items %} + <tr> + <td>{{ var.0 }}</td> + <td 
class="code"><div>{{ var.1|pprint }}</div></td> + </tr> + {% endfor %} + </tbody> + </table> + {% else %} + <p>No cookie data</p> + {% endif %} + + <h3 id="meta-info">META</h3> + <table class="req"> + <thead> + <tr> + <th>Variable</th> + <th>Value</th> + </tr> + </thead> + <tbody> + {% for var in request.META.items|dictsort:"0" %} + <tr> + <td>{{ var.0 }}</td> + <td class="code"><div>{{ var.1|pprint }}</div></td> + </tr> + {% endfor %} + </tbody> + </table> + + <h3 id="settings-info">Settings</h3> + <h4>Using settings module <code>{{ settings.SETTINGS_MODULE }}</code></h4> + <table class="req"> + <thead> + <tr> + <th>Setting</th> + <th>Value</th> + </tr> + </thead> + <tbody> + {% for var in settings.items|dictsort:"0" %} + <tr> + <td>{{ var.0 }}</td> + <td class="code"><div>{{ var.1|pprint }}</div></td> + </tr> + {% endfor %} + </tbody> + </table> + +</div> + +<div id="explanation"> + <p> + You're seeing this error because you have <code>DEBUG = True</code> in your + Django settings file. Change that to <code>False</code>, and Django will + display a standard 500 page. 
+ </p> +</div> +</body> +</html> +""" + +TECHNICAL_404_TEMPLATE = """ +<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> +<html lang="en"> +<head> + <meta http-equiv="content-type" content="text/html; charset=utf-8"> + <title>Page not found at {{ request.path|escape }}</title> + <meta name="robots" content="NONE,NOARCHIVE"> + <style type="text/css"> + html * { padding:0; margin:0; } + body * { padding:10px 20px; } + body * * { padding:0; } + body { font:small sans-serif; background:#eee; } + body>div { border-bottom:1px solid #ddd; } + h1 { font-weight:normal; margin-bottom:.4em; } + h1 span { font-size:60%; color:#666; font-weight:normal; } + table { border:none; border-collapse: collapse; width:100%; } + td, th { vertical-align:top; padding:2px 3px; } + th { width:12em; text-align:right; color:#666; padding-right:.5em; } + #info { background:#f6f6f6; } + #info ol { margin: 0.5em 4em; } + #info ol li { font-family: monospace; } + #summary { background: #ffc; } + #explanation { background:#eee; border-bottom: 0px none; } + </style> +</head> +<body> + <div id="summary"> + <h1>Page not found <span>(404)</span></h1> + <table class="meta"> + <tr> + <th>Request Method:</th> + <td>{{ request.META.REQUEST_METHOD }}</td> + </tr> + <tr> + <th>Request URL:</th> + <td>{{ request_protocol }}://{{ request.META.HTTP_HOST }}{{ request.path|escape }}</td> + </tr> + </table> + </div> + <div id="info"> + {% if urlpatterns %} + <p> + Using the URLconf defined in <code>{{ settings.ROOT_URLCONF }}</code>, + Django tried these URL patterns, in this order: + </p> + <ol> + {% for pattern in urlpatterns %} + <li>{{ pattern }}</li> + {% endfor %} + </ol> + <p>The current URL, <code>{{ request_path|escape }}</code>, didn't match any of these.</p> + {% else %} + <p>{{ reason }}</p> + {% endif %} + </div> + + <div id="explanation"> + <p> + You're seeing this error because you have <code>DEBUG = True</code> in + your Django settings file. 
Change that to <code>False</code>, and Django + will display a standard 404 page. + </p> + </div> +</body> +</html> +""" + +EMPTY_URLCONF_TEMPLATE = """ +<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> +<html lang="en"><head> + <meta http-equiv="content-type" content="text/html; charset=utf-8"> + <meta name="robots" content="NONE,NOARCHIVE"><title>Welcome to Django</title> + <style type="text/css"> + html * { padding:0; margin:0; } + body * { padding:10px 20px; } + body * * { padding:0; } + body { font:small sans-serif; } + body>div { border-bottom:1px solid #ddd; } + h1 { font-weight:normal; } + h2 { margin-bottom:.8em; } + h2 span { font-size:80%; color:#666; font-weight:normal; } + h3 { margin:1em 0 .5em 0; } + h4 { margin:0 0 .5em 0; font-weight: normal; } + table { border:1px solid #ccc; border-collapse: collapse; width:100%; background:white; } + tbody td, tbody th { vertical-align:top; padding:2px 3px; } + thead th { padding:1px 6px 1px 3px; background:#fefefe; text-align:left; font-weight:normal; font-size:11px; border:1px solid #ddd; } + tbody th { width:12em; text-align:right; color:#666; padding-right:.5em; } + ul { margin-left: 2em; margin-top: 1em; } + #summary { background: #e0ebff; } + #summary h2 { font-weight: normal; color: #666; } + #explanation { background:#eee; } + #instructions { background:#f6f6f6; } + #summary table { border:none; background:transparent; } + </style> +</head> + +<body> +<div id="summary"> + <h1>It worked!</h1> + <h2>Congratulations on your first Django-powered page.</h2> +</div> + +<div id="instructions"> + <p>Of course, you haven't actually done any work yet. 
Here's what to do next:</p> + <ul> + <li>If you plan to use a database, edit the <code>DATABASE_*</code> settings in <code>{{ project_name }}/settings.py</code>.</li> + <li>Start your first app by running <code>python {{ project_name }}/manage.py startapp [appname]</code>.</li> + </ul> +</div> + +<div id="explanation"> + <p> + You're seeing this message because you have <code>DEBUG = True</code> in your + Django settings file and you haven't configured any URLs. Get to work! + </p> +</div> +</body></html> +""" This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <th...@us...> - 2008-03-20 16:30:41
|
Revision: 4238 http://jython.svn.sourceforge.net/jython/?rev=4238&view=rev Author: thobes Date: 2008-03-20 09:30:36 -0700 (Thu, 20 Mar 2008) Log Message: ----------- Fixed the build errors in the ast sub-dir. At least it works for me now. Also fixed the build.xml in pyasm so that it can build the entire project. Modified Paths: -------------- trunk/sandbox/ast/asdl_antlr.py trunk/sandbox/ast/build.xml trunk/sandbox/ast/src/org/python/antlr/PythonTree.java trunk/sandbox/pyasm/build.properties trunk/sandbox/pyasm/build.xml trunk/sandbox/pyasm/compiler/org/python/newcompiler/ast/AstToBytecode.java Modified: trunk/sandbox/ast/asdl_antlr.py =================================================================== --- trunk/sandbox/ast/asdl_antlr.py 2008-03-20 15:38:33 UTC (rev 4237) +++ trunk/sandbox/ast/asdl_antlr.py 2008-03-20 16:30:36 UTC (rev 4238) @@ -276,35 +276,35 @@ #self.emit("", 0) # The accept() method - #self.emit("public Object accept(VisitorIF visitor) throws Exception {", depth) - #if clsname == ctorname: - # self.emit('return visitor.visit%s(this);' % clsname, depth+1) - #else: - # self.emit('traverse(visitor);' % clsname, depth+1) - # self.emit('return null;' % clsname, depth+1) - #self.emit("}", depth) - #self.emit("", 0) + self.emit("public <R> R accept(VisitorIF<R> visitor) throws Exception {", depth) + if clsname == ctorname: + self.emit('return visitor.visit%s(this);' % clsname, depth+1) + else: + self.emit('traverse(visitor);' % clsname, depth+1) + self.emit('return null;' % clsname, depth+1) + self.emit("}", depth) + self.emit("", 0) # The visitChildren() method - #self.emit("public void traverse(VisitorIF visitor) throws Exception {", depth) - #for f in fields: - # if self.bltinnames.has_key(str(f.type)): - # continue - # if f.typedef.simple: - # continue - # if f.seq: - # self.emit('if (%s != null) {' % f.name, depth+1) - # self.emit('for (int i = 0; i < %s.length; i++) {' % f.name, - # depth+2) - # self.emit('if (%s[i] != null)' % f.name, depth+3) - 
# self.emit('%s[i].accept(visitor);' % f.name, depth+4) - # self.emit('}', depth+2) - # self.emit('}', depth+1) - # else: - # self.emit('if (%s != null)' % f.name, depth+1) - # self.emit('%s.accept(visitor);' % f.name, depth+2) - #self.emit('}', depth) - #self.emit("", 0) + self.emit("public void traverse(VisitorIF visitor) throws Exception {", depth) + for f in fields: + if self.bltinnames.has_key(str(f.type)): + continue + if f.typedef.simple: + continue + if f.seq: + self.emit('if (%s != null) {' % f.name, depth+1) + self.emit('for (int i = 0; i < %s.length; i++) {' % f.name, + depth+2) + self.emit('if (%s[i] != null)' % f.name, depth+3) + self.emit('%s[i].accept(visitor);' % f.name, depth+4) + self.emit('}', depth+2) + self.emit('}', depth+1) + else: + self.emit('if (%s != null)' % f.name, depth+1) + self.emit('%s.accept(visitor);' % f.name, depth+2) + self.emit('}', depth) + self.emit("", 0) def visitField(self, field, depth): self.emit("public %s;" % self.fieldDef(field), depth) @@ -338,25 +338,25 @@ for dfn in mod.dfns: self.visit(dfn) self.open("VisitorIF", refersToPythonTree=0) - self.emit('public interface VisitorIF {', 0) + self.emit('public interface VisitorIF<R> {', 0) for ctor in self.ctors: - self.emit("public Object visit%s(%s node) throws Exception;" % + self.emit("public R visit%s(%s node) throws Exception;" % (ctor, ctor), 1) self.emit('}', 0) self.close() self.open("VisitorBase") - self.emit('public abstract class VisitorBase implements VisitorIF {', 0) + self.emit('public abstract class VisitorBase<R> implements VisitorIF<R> {', 0) for ctor in self.ctors: - self.emit("public Object visit%s(%s node) throws Exception {" % + self.emit("public R visit%s(%s node) throws Exception {" % (ctor, ctor), 1) - self.emit("Object ret = unhandled_node(node);", 2) + self.emit("R ret = unhandled_node(node);", 2) self.emit("traverse(node);", 2) self.emit("return ret;", 2) self.emit('}', 1) self.emit('', 0) - self.emit("abstract protected Object 
unhandled_node(PythonTree node) throws Exception;", 1) + self.emit("abstract protected R unhandled_node(PythonTree node) throws Exception;", 1) self.emit("abstract public void traverse(PythonTree node) throws Exception;", 1) self.emit('}', 0) self.close() Modified: trunk/sandbox/ast/build.xml =================================================================== --- trunk/sandbox/ast/build.xml 2008-03-20 15:38:33 UTC (rev 4237) +++ trunk/sandbox/ast/build.xml 2008-03-20 16:30:36 UTC (rev 4238) @@ -12,7 +12,7 @@ </target> <target name="antlr_gen" depends="init"> - <java classname="org.antlr.Tool"> + <java classname="org.antlr.Tool" failonerror="true"> <arg value="-fo"/> <arg path="build/gensrc/org/python/antlr"/> <arg value="-lib"/> Modified: trunk/sandbox/ast/src/org/python/antlr/PythonTree.java =================================================================== --- trunk/sandbox/ast/src/org/python/antlr/PythonTree.java 2008-03-20 15:38:33 UTC (rev 4237) +++ trunk/sandbox/ast/src/org/python/antlr/PythonTree.java 2008-03-20 16:30:36 UTC (rev 4238) @@ -9,6 +9,8 @@ import org.antlr.runtime.RecognitionException; +import org.python.antlr.ast.VisitorIF; + public class PythonTree extends CommonTree { public PythonTree(Token token) { @@ -20,33 +22,41 @@ } public String toString() { - if ( isNil() ) { - return "None"; - } - return token.getText(); - } + if (isNil()) { + return "None"; + } + return token.getText(); + } public String toStringTree() { - if ( children==null || children.size()==0 ) { - //System.out.println("Where are my children? 
-- asks " + token.getText()); - return this.toString(); - } - StringBuffer buf = new StringBuffer(); - if ( !isNil() ) { - buf.append("("); - buf.append(this.toString()); - buf.append(' '); - } - for (int i = 0; children!=null && i < children.size(); i++) { - BaseTree t = (BaseTree) children.get(i); - if ( i>0 ) { - buf.append(' '); - } - buf.append(t.toStringTree()); - } - if ( !isNil() ) { - buf.append(")"); - } - return buf.toString(); - } + if (children == null || children.size() == 0) { + // System.out.println("Where are my children? -- asks " + token.getText()); + return this.toString(); + } + StringBuffer buf = new StringBuffer(); + if (!isNil()) { + buf.append("("); + buf.append(this.toString()); + buf.append(' '); + } + for (int i = 0; children != null && i < children.size(); i++) { + BaseTree t = (BaseTree)children.get(i); + if (i > 0) { + buf.append(' '); + } + buf.append(t.toStringTree()); + } + if (!isNil()) { + buf.append(")"); + } + return buf.toString(); + } + + public <R> R accept(VisitorIF<R> visitor) throws Exception { + throw new RuntimeException("Unexpected node: " + this); + } + + public void traverse(VisitorIF visitor) throws Exception { + throw new RuntimeException("Cannot traverse node: " + this); + } } Modified: trunk/sandbox/pyasm/build.properties =================================================================== --- trunk/sandbox/pyasm/build.properties 2008-03-20 15:38:33 UTC (rev 4237) +++ trunk/sandbox/pyasm/build.properties 2008-03-20 16:30:36 UTC (rev 4238) @@ -1,2 +1,3 @@ # Default jython location is mapped to the jython.dir=../../jython +ast.dir=../ast Modified: trunk/sandbox/pyasm/build.xml =================================================================== --- trunk/sandbox/pyasm/build.xml 2008-03-20 15:38:33 UTC (rev 4237) +++ trunk/sandbox/pyasm/build.xml 2008-03-20 16:30:36 UTC (rev 4238) @@ -1,4 +1,4 @@ -<project name="pyasm" default="pyasm-copy" basedir="."> +<project name="newcompiler" default="all" basedir="."> <property 
file="${basedir}/build.properties"/> <property name="dist.dir" value="${jython.dir}/dist"/> @@ -9,39 +9,80 @@ <property name="jdk.target.version" value="1.5"/> <property name="jdk.source.version" value="1.5"/> <property name="agent.class" value="org.python.javaagent.DeferringAgent"/> - + <path id="jython.class.path"> - <pathelement location="${jython.dir}/build/classes"/> + <pathelement location="${jython.dir}/build/classes"/> </path> + <path id="ast.class.path"> + <pathelement location="${ast.dir}/build"/> + </path> + <path id="antlr.runtime.path"> <pathelement path="${jython.dir}/extlibs/antlr-runtime-3.0.1.jar"/> </path> - <target name="all" depends="pyasm-copy,agent,script"/> + <target name="all" depends="compiler,pyasm-copy,script"/> + <target name="clean"> + <delete dir="build"/> + </target> + + <target name="clean-all" depends="clean"> + <ant dir="${ast.dir}" target="clean" inheritAll="false"/> + <ant dir="${jython.dir}" target="clean" inheritAll="false"/> + </target> + <target name="jython"> <ant dir="${jython.dir}" target="developer-build" inheritAll="false"/> </target> + <target name="ast"> + <echo>WARNING!!! + The ast build.xml has been known to fail on a few platforms, + manual build might be required. In that case, use this + build.xml file to figure out the dependancies. On the + platforms where the ast build fails it can be executed with: + .../sandbox/ast $ ant + [Generates all sources, then fails silently] + .../sandbox/ast $ ant compile + [Compiles the ast sources] + + After that you will need to remove or comment out the line + after this echo in this build file (the one that references + the ast/build.xml), so that this build will not try to re- + -build the ast. 
+ </echo> + <ant dir="${ast.dir}" target="all" inheritAll="false"/> + <jar destfile="${dist.dir}/antlr-ast.jar" update="true"> + <fileset dir="${ast.dir}/build"> + <include name="org/python/antlr/**"/> + </fileset> + </jar> + </target> + <target name="pyasm-copy" depends="jython"> <copy todir="${dist.dir}/Lib" preservelastmodified="true" overwrite="true"> <fileset dir="${basedir}"> + <include name="newcompiler.py"/> <include name="marshal.py"/> + <include name="adapter.py"/> <include name="pyasm.py"/> <include name="opcode.py"/> + <include name="astimport.py"/> <include name="pycimport.py"/> </fileset> </copy> </target> - <target name="compile"> + <target name="compile" depends="ast,jython"> <mkdir dir="${build.dir}"/> <javac destdir="${build.dir}" source="${jdk.source.version}" target="${jdk.target.version}"> <src path="${agent.dir}"/> <src path="${compiler.dir}"/> - <classpath refid="jython.class.path" /> + <classpath refid="jython.class.path"/> + <classpath refid="ast.class.path"/> <classpath refid="antlr.runtime.path" /> </javac> </target> @@ -49,15 +90,15 @@ <target name="agent" depends="compile"> <jar destfile="${dist.dir}/agent.jar" update="true"> <fileset dir="${build.dir}"> - <include name="org/python/javaagent/**" /> + <include name="org/python/javaagent/**"/> </fileset> <manifest> <!-- Instrumentation agent, for a more dynamic newcompiler --> - <attribute name="Premain-Class" value="${agent.class}" /> - <attribute name="Can-Redefine-Classes" value="true" /> + <attribute name="Premain-Class" value="${agent.class}"/> + <attribute name="Can-Redefine-Classes" value="true"/> <!-- Java 6 stuff, more powerful agents --> - <attribute name="Agent-Class" value="${agent.class}" /> - <attribute name="Can-Retransform-Classes" value="true" /> + <attribute name="Agent-Class" value="${agent.class}"/> + <attribute name="Can-Retransform-Classes" value="true"/> </manifest> </jar> </target> @@ -65,7 +106,7 @@ <target name="compiler" depends="compile"> <jar 
destfile="${dist.dir}/compiler.jar" update="true"> <fileset dir="${build.dir}"> - <exclude name="org/python/javaagent/**" /> + <exclude name="org/python/javaagent/**"/> </fileset> </jar> </target> @@ -77,6 +118,8 @@ EXE=`readlink $EXE` done BASE=`dirname $EXE` +ANTLR_AST=$BASE/dist/antlr-ast.jar +COMPILER=$BASE/dist/compiler.jar AGENT=$BASE/dist/agent.jar JVMFLAGS="-server -Dpython.home=$BASE/dist/ -Dpython.executable=$0 $JVMFLAGS" if [ -e $AGENT ]; then @@ -90,10 +133,17 @@ REQ=$REQ:$BASE/extlibs/asm-util-3.1.jar REQ=$REQ:$BASE/extlibs/junit-3.8.2.jar REQ=$REQ:$BASE/extlibs/jython-engine.jar +REQ=$REQ:$BASE/extlibs/antlr-runtime-3.0.1.jar REQ=$REQ:$BASE/dist/jython.jar if [ "$CLASSPATH" = "" ]; then CLASSPATH=. fi +if [ -e $ANTLR_AST ]; then + CLASSPATH=$CLASSPATH:$ANTLR_AST +fi +if [ -e $COMPILER ]; then + CLASSPATH=$CLASSPATH:$COMPILER +fi if [ "$JAVA_HOME" != "" ]; then JAVA=$JAVA_HOME/bin/java else Modified: trunk/sandbox/pyasm/compiler/org/python/newcompiler/ast/AstToBytecode.java =================================================================== --- trunk/sandbox/pyasm/compiler/org/python/newcompiler/ast/AstToBytecode.java 2008-03-20 15:38:33 UTC (rev 4237) +++ trunk/sandbox/pyasm/compiler/org/python/newcompiler/ast/AstToBytecode.java 2008-03-20 16:30:36 UTC (rev 4238) @@ -225,10 +225,10 @@ private void buildContext(modType node) throws Exception { context = node.accept(new ContextBuilder()); } - + private void sendResumeTable() { - if(currentEnvironment.isReenterable()) { - compiler.visitResumeTable(new Label(), currentEnvironment.getEntryPoints()); + if (currentEnvironment.isReenterable()) { + compiler.visitResumeTable(new Label(), currentEnvironment.getEntryPoints()); } } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <nr...@us...> - 2008-03-20 23:17:33
|
Revision: 4241 http://jython.svn.sourceforge.net/jython/?rev=4241&view=rev Author: nriley Date: 2008-03-20 16:17:31 -0700 (Thu, 20 Mar 2008) Log Message: ----------- Oops. (Also, it compares Python bytecode, not ASTs, despite the name.) Added Paths: ----------- trunk/sandbox/pyasm/astcompare.py Removed Paths: ------------- trunk/sandbox/ast/astcompare.py Deleted: trunk/sandbox/ast/astcompare.py =================================================================== --- trunk/sandbox/ast/astcompare.py 2008-03-20 23:03:23 UTC (rev 4240) +++ trunk/sandbox/ast/astcompare.py 2008-03-20 23:17:31 UTC (rev 4241) @@ -1,124 +0,0 @@ -import os, re, sys - -from org.python.newcompiler.bytecode import PythonBytecodeCompilingBundle -import org.python.antlr.Main as Parser -from org.python.newcompiler.ast import AstToBytecode -from newcompiler import BytecodeCompiler, CodeReference - -py_line_no = 0 -py_line_no_printed = False -cpy_dis = [] -cpy_dis_line_no = 0 -dis_offset = 0 - -class Disassembler(BytecodeCompiler): - def __init__(self, codeinfo, level=0, post=None): - self.__codeinfo = codeinfo - self.__pre = "pyasm." 
- self.__level = level - self.__ind = ">"*(4*level) - self.post = post - self.reference = CodeReference(codeinfo) - def __join(self, it, sep=','): - if it is not None: - return sep.join(it) - else: - return "" - def __str__(self): - return self.__repr__() - def __repr__(self): - return "Disassembler()" - def constructClass(self, name, closureNames, flags): - print "%s%sconstructClass(%s, [%s], [%s]):" % ( - self.__ind, self.__pre, name, - self.__join(closureNames), self.__join(flags)) - return Disassembler(self.__codeinfo, self.__level+1) - def constructFunction(self, closureNames, numDefault, flags, name=None): - # print "%s%sconstructFunction([%s], %s, [%s], %s):" % ( - # self.__ind, self.__pre, self.__join(closureNames), - # numDefault, self.__join(flags), name) - def post(dis_offset=dis_offset): - self.dump(None, dis_offset, 'LOAD_CONST', '<code object>') - self.dump(None, None, 'MAKE_FUNCTION') - self.dump(None, None, 'STORE_NAME', name) - return Disassembler(self.__codeinfo, self.__level+1, post) - def dump(self, line_no, offset, opname, *args): - global py_line_no, py_line_no_printed, cpy_dis_line_no, dis_offset - print self.__ind, - if self.__level == 0: - print '%3d:' % cpy_dis_line_no, - else: - print ' ', - if py_line_no_printed or self.__level > 0: - print ' ', - else: - if line_no is None: line_no = py_line_no - print '%3d' % (line_no + 1), - py_line_no_printed = True - if self.__level == 0: - print '| ', - else: - print ' ', - if offset is None: offset = dis_offset - print '%4d' % offset, - print OPNAMES.get(opname, opname).ljust(20), - if args: - print '(%s)' % ','.join(args) - if self.__level == 0: - dis_offset += 2 + len(args) - else: - print - if self.__level == 0: - dis_offset += 1 - if self.__level == 0: - if cpy_dis_line_no < len(cpy_dis): - print '%4d:' % cpy_dis_line_no, cpy_dis[cpy_dis_line_no] - cpy_dis_line_no += 1 - - def __getattr__(self,attr): - def visit(*args): - opname = '_'.join([c.upper() - for c in re.findall(r'[A-Z](?:[^A-Z]*)', 
- attr.replace('visit', ''))]) - self.dump(None, None, opname, *[repr(a) for a in args]) - return visit - def visitLineNumber(self, lineNumber): - global py_line_no, py_line_no_printed - py_line_no = lineNumber - py_line_no_printed = False - def visitStop(self): - if self.post is None: - print '-- stop --' - else: - self.post() - def visitResumeTable(self, start, entries): - print "%s%svisitResumeTable(" % (self.__ind, self.__pre) - print "%s start: %s," % (self.__ind, start), - for entry in entries: - print "\n%s entry: %s," % (self.__ind, entry), - print ")" - -class DisassemblerBundle(PythonBytecodeCompilingBundle): - def __init__(self): pass - def compile(self, signature, info, flags, storeable): - return Disassembler(info) - def loadHandler(self, loader): pass - def saveFilesAndLoadHandle(self, loader, dir): pass - -def _parse(*files): - return Parser().parse(files) -def _pbc(ast, bundle, name): - return ast.accept(AstToBytecode(bundle, name)) -def _compile(filepath, name, *args,**kwargs): - module = _pbc(_parse(filepath), DisassemblerBundle(*args, **kwargs), name) - -OPNAMES = dict(LOAD_CONSTANT='LOAD_CONST', RETURN='RETURN_VALUE', - LOAD='LOAD_ATTRIBUTE') - -if __name__ == '__main__': - from subprocess import Popen, PIPE - f = sys.argv[1] - p = Popen(['python', '-m', 'dis', f], stdout=PIPE) - for line in p.stdout: - cpy_dis.append('%s%s' % (line[:36], line[42:-2])) - _compile(f, os.path.splitext(f)[0]) Copied: trunk/sandbox/pyasm/astcompare.py (from rev 4240, trunk/sandbox/ast/astcompare.py) =================================================================== --- trunk/sandbox/pyasm/astcompare.py (rev 0) +++ trunk/sandbox/pyasm/astcompare.py 2008-03-20 23:17:31 UTC (rev 4241) @@ -0,0 +1,124 @@ +import os, re, sys + +from org.python.newcompiler.bytecode import PythonBytecodeCompilingBundle +import org.python.antlr.Main as Parser +from org.python.newcompiler.ast import AstToBytecode +from newcompiler import BytecodeCompiler, CodeReference + +py_line_no = 0 
+py_line_no_printed = False +cpy_dis = [] +cpy_dis_line_no = 0 +dis_offset = 0 + +class Disassembler(BytecodeCompiler): + def __init__(self, codeinfo, level=0, post=None): + self.__codeinfo = codeinfo + self.__pre = "pyasm." + self.__level = level + self.__ind = ">"*(4*level) + self.post = post + self.reference = CodeReference(codeinfo) + def __join(self, it, sep=','): + if it is not None: + return sep.join(it) + else: + return "" + def __str__(self): + return self.__repr__() + def __repr__(self): + return "Disassembler()" + def constructClass(self, name, closureNames, flags): + print "%s%sconstructClass(%s, [%s], [%s]):" % ( + self.__ind, self.__pre, name, + self.__join(closureNames), self.__join(flags)) + return Disassembler(self.__codeinfo, self.__level+1) + def constructFunction(self, closureNames, numDefault, flags, name=None): + # print "%s%sconstructFunction([%s], %s, [%s], %s):" % ( + # self.__ind, self.__pre, self.__join(closureNames), + # numDefault, self.__join(flags), name) + def post(dis_offset=dis_offset): + self.dump(None, dis_offset, 'LOAD_CONST', '<code object>') + self.dump(None, None, 'MAKE_FUNCTION') + self.dump(None, None, 'STORE_NAME', name) + return Disassembler(self.__codeinfo, self.__level+1, post) + def dump(self, line_no, offset, opname, *args): + global py_line_no, py_line_no_printed, cpy_dis_line_no, dis_offset + print self.__ind, + if self.__level == 0: + print '%3d:' % cpy_dis_line_no, + else: + print ' ', + if py_line_no_printed or self.__level > 0: + print ' ', + else: + if line_no is None: line_no = py_line_no + print '%3d' % (line_no + 1), + py_line_no_printed = True + if self.__level == 0: + print '| ', + else: + print ' ', + if offset is None: offset = dis_offset + print '%4d' % offset, + print OPNAMES.get(opname, opname).ljust(20), + if args: + print '(%s)' % ','.join(args) + if self.__level == 0: + dis_offset += 2 + len(args) + else: + print + if self.__level == 0: + dis_offset += 1 + if self.__level == 0: + if cpy_dis_line_no 
< len(cpy_dis): + print '%4d:' % cpy_dis_line_no, cpy_dis[cpy_dis_line_no] + cpy_dis_line_no += 1 + + def __getattr__(self,attr): + def visit(*args): + opname = '_'.join([c.upper() + for c in re.findall(r'[A-Z](?:[^A-Z]*)', + attr.replace('visit', ''))]) + self.dump(None, None, opname, *[repr(a) for a in args]) + return visit + def visitLineNumber(self, lineNumber): + global py_line_no, py_line_no_printed + py_line_no = lineNumber + py_line_no_printed = False + def visitStop(self): + if self.post is None: + print '-- stop --' + else: + self.post() + def visitResumeTable(self, start, entries): + print "%s%svisitResumeTable(" % (self.__ind, self.__pre) + print "%s start: %s," % (self.__ind, start), + for entry in entries: + print "\n%s entry: %s," % (self.__ind, entry), + print ")" + +class DisassemblerBundle(PythonBytecodeCompilingBundle): + def __init__(self): pass + def compile(self, signature, info, flags, storeable): + return Disassembler(info) + def loadHandler(self, loader): pass + def saveFilesAndLoadHandle(self, loader, dir): pass + +def _parse(*files): + return Parser().parse(files) +def _pbc(ast, bundle, name): + return ast.accept(AstToBytecode(bundle, name)) +def _compile(filepath, name, *args,**kwargs): + module = _pbc(_parse(filepath), DisassemblerBundle(*args, **kwargs), name) + +OPNAMES = dict(LOAD_CONSTANT='LOAD_CONST', RETURN='RETURN_VALUE', + LOAD='LOAD_ATTRIBUTE') + +if __name__ == '__main__': + from subprocess import Popen, PIPE + f = sys.argv[1] + p = Popen(['python', '-m', 'dis', f], stdout=PIPE) + for line in p.stdout: + cpy_dis.append('%s%s' % (line[:36], line[42:-2])) + _compile(f, os.path.splitext(f)[0]) This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |