1 // $Id: CQLLexer.java,v 1.14 2007-07-03 13:30:42 mike Exp $
3 package org.z3950.zing.cql;
4 import java.io.StreamTokenizer;
5 import java.io.StringReader;
8 // This is a semi-trivial subclass for java.io.StreamTokenizer that:
9 // * Has a halfDecentPushBack() method that actually works
10 // * Includes a render() method
11 // * Knows about the multi-character tokens "<=", ">=" and "<>"
12 // * Recognises a set of keywords as tokens in their own right
13 // * Includes some primitive debugging-output facilities
14 // It's used only by CQLParser.
// Lexer for CQL queries: a StreamTokenizer subclass with a working
// push-back, multi-character relation tokens and keyword recognition.
// Used only by CQLParser.
class CQLLexer extends StreamTokenizer {
    // New publicly visible token-types.  Values start at 1000 so they
    // cannot collide with single-character ttype values or with the
    // negative TT_* constants inherited from StreamTokenizer.
    public final static int TT_LE     = 1000;	// The "<=" relation
    public final static int TT_GE     = 1001;	// The ">=" relation
    public final static int TT_NE     = 1002;	// The "<>" relation
    public final static int TT_EQEQ   = 1003;	// The "==" relation
    public final static int TT_AND    = 1004;	// The "and" boolean
    public final static int TT_OR     = 1005;	// The "or" boolean
    public final static int TT_NOT    = 1006;	// The "not" boolean
    public final static int TT_PROX   = 1007;	// The "prox" boolean
    public final static int TT_SORTBY = 1008;	// The "sortby" operator
    // Support for keywords.  It would be nice to compile this linear
    // list into a Hashtable, but it's hard to store ints as hash
    // values, and next to impossible to use them as hash keys.  So
    // we'll just scan the (very short) list every time we need to do
    // a lookup.

    // Pairs a keyword string with the token-type constant to report
    // when that word is seen.
    // NOTE(review): the field declarations (token/keyword), the
    // assignment of `token`, and the closing braces of this inner
    // class are not visible in this view of the file.
    private class Keyword {
        Keyword(int token, String keyword) {
            this.keyword = keyword;

    // This should logically be static, but Java won't allow it :-P
    // Keyword table scanned linearly wherever a word needs to be
    // checked for keyword-ness (see underlyingNextToken() and render()).
    private Keyword[] keywords = {
        new Keyword(TT_AND, "and"),
        new Keyword(TT_OR, "or"),
        new Keyword(TT_NOT, "not"),
        new Keyword(TT_PROX, "prox"),
        new Keyword(TT_SORTBY, "sortby"),
    // For halfDecentPushBack() and the code at the top of nextToken():
    // when a token has been pushed back, saved_ttype holds its type
    // (with saved_nval/saved_sval presumably holding its value — the
    // assignments are not visible in this view); the sentinel
    // TT_UNDEFINED means "nothing saved".
    private static int TT_UNDEFINED = -1000;
    private int saved_ttype = TT_UNDEFINED;
    private double saved_nval;
    private String saved_sval;

    // Controls debugging output
    private static boolean DEBUG;
    // Constructs a lexer over the CQL string `cql`; `lexdebug`
    // requests debugging output (the line that consumes it is not
    // visible in this view — presumably it sets DEBUG; confirm against
    // the full file).
    // NOTE(review): several syntax-table setup lines of this
    // constructor are also elided from this view.
    CQLLexer(String cql, boolean lexdebug) {
        super(new StringReader(cql));
        // Fold most ASCII punctuation into "word" characters so terms
        // such as dot-separated indexes lex as single words.
        wordChars('!', '?');	// ASCII-dependency!
        wordChars('[', '`');	// ASCII-dependency!
        wordChars('\'', '\'');	// prevent this from introducing strings
        // Stop StreamTokenizer's built-in number parsing from firing
        // automatically on digits.
        ordinaryChars('0', '9');
    // Writes `str` to stderr with a "LEXDEBUG: " prefix.
    // NOTE(review): the guard that tests DEBUG before printing is not
    // visible in this view of the file.
    private static void debug(String str) {
	    System.err.println("LEXDEBUG: " + str);
    // I don't honestly understand why we need this, but the
    // documentation for java.io.StreamTokenizer.pushBack() is pretty
    // vague about its semantics, and it seems to me that they could
    // be summed up as "it doesn't work". This version has the very
    // clear semantics "pretend I didn't call nextToken() just then".
    // NOTE(review): the body — presumably saving ttype/nval/sval into
    // the saved_* fields read back by nextToken() — is not visible in
    // this view of the file.
    private void halfDecentPushBack() {
    // Delivers the next token, re-delivering a pushed-back token first
    // if there is one, and combining the two-character sequences "<=",
    // "<>", ">=" and "==" into the single token-types TT_LE, TT_NE,
    // TT_GE and TT_EQEQ.
    // NOTE(review): many structural lines of this method (if-headers
    // such as `if (ttype == '<')`, ttype assignments, returns and
    // closing braces) are elided from this view; the comments below
    // describe only what the visible lines show.
    public int nextToken() throws java.io.IOException {
        // A token was pushed back by halfDecentPushBack(): clear the
        // sentinel and hand it back instead of reading new input.
        if (saved_ttype != TT_UNDEFINED) {
            saved_ttype = TT_UNDEFINED;
            debug("using saved ttype=" + ttype + ", " +
                  "nval=" + nval + ", sval='" + sval + "'");

        underlyingNextToken();
        // '<' may begin "<=" or "<>"; peek at the following token.
            debug("token starts with '<' ...");
            underlyingNextToken();
                debug("token continues with '=' - it's '<='");
            } else if (ttype == '>') {
                debug("token continues with '>' - it's '<>'");
                // Neither '=' nor '>' followed: push the peeked token
                // back so '<' stands alone.
                debug("next token is " + render() + " (pushed back)");
                halfDecentPushBack();
                debug("AFTER: ttype is now " + ttype + " - " + render());
        } else if (ttype == '>') {
            // '>' may begin ">=".
            debug("token starts with '>' ...");
            underlyingNextToken();
                debug("token continues with '=' - it's '>='");
                debug("next token is " + render() + " (pushed back)");
                halfDecentPushBack();
                debug("AFTER: ttype is now " + ttype + " - " + render());
        } else if (ttype == '=') {
            // '=' may begin "==".
            debug("token starts with '=' ...");
            underlyingNextToken();
                debug("token continues with '=' - it's '=='");
                debug("next token is " + render() + " (pushed back)");
                halfDecentPushBack();
                debug("AFTER: ttype is now " + ttype + " - " + render());

        debug("done nextToken(): ttype=" + ttype + ", " +
              "nval=" + nval + ", " + "sval='" + sval + "'" +
              " (" + render() + ")");
    // It's important to do keyword recognition here at the lowest
    // level, otherwise when one of these words follows "<" or ">"
    // (which can be the beginning of multi-character tokens) it gets
    // pushed back as a string, and its keywordiness is not
    // subsequently recognised.
    // NOTE(review): the call that actually reads the next token
    // (presumably super.nextToken()) and this method's return
    // statement are not visible in this view of the file.
    public int underlyingNextToken() throws java.io.IOException {
        // Promote a plain word to its keyword token-type via a linear
        // scan of the (very short) keyword table; match is
        // case-insensitive.
        if (ttype == TT_WORD)
            for (int i = 0; i < keywords.length; i++)
                if (sval.equalsIgnoreCase(keywords[i].keyword))
                    ttype = keywords[i].token;
    // Simpler interface for the usual case: current token with quoting.
    // NOTE(review): the method signature line (presumably
    // `String render()`) is not visible in this view of the file.
        return render(ttype, true);
    // Returns a human-readable rendering of `token` (used by the
    // debug() output): words and strings include their sval, numbers
    // their nval, keywords their spelling, and any other token-type is
    // rendered as its single character, quoted if `quoteChars` is set.
    // NOTE(review): several return statements (e.g. for TT_EOF, TT_LE,
    // TT_GE, TT_NE, TT_EQEQ) and the final return are elided from this
    // view of the file.
    String render(int token, boolean quoteChars) {
        if (token == TT_EOF) {
        } else if (token == TT_NUMBER) {
            // Render a whole-valued nval without a trailing ".0".
            if ((double) nval == (int) nval) {
                return new Integer((int) nval).toString();
                return new Double((double) nval).toString();
        } else if (token == TT_WORD) {
            return "word: " + sval;
        } else if (token == '"') {
            return "string: \"" + sval + "\"";
        } else if (token == TT_LE) {
        } else if (token == TT_GE) {
        } else if (token == TT_NE) {
        } else if (token == TT_EQEQ) {

        // Check whether it's associated with one of the keywords
        for (int i = 0; i < keywords.length; i++)
            if (token == keywords[i].token)
                return keywords[i].keyword;

        // Otherwise it must be a single character, such as '(' or '/'.
        String res = String.valueOf((char) token);
        if (quoteChars) res = "'" + res + "'";
    // Simple test harness: lexes a CQL query given as the single
    // command-line argument, or read from standard input if no
    // argument is given.  Token rendering happens as a side effect of
    // the lexer's own debug() output (lexdebug is passed as true).
    // NOTE(review): the usage-error exit, the assignment of args[0] to
    // cql, the try header, and the loop/closing braces are elided from
    // this view of the file.
    public static void main(String[] args) throws Exception {
        if (args.length > 1) {
            System.err.println("Usage: CQLLexer [<CQL-query>]");
            System.err.println("If unspecified, query is read from stdin");
        if (args.length == 1) {
            byte[] bytes = new byte[10000];
                // Read in the whole of standard input in one go
                int nbytes = System.in.read(bytes);
            } catch (java.io.IOException ex) {
                System.err.println("Can't read query: " + ex.getMessage());
            cql = new String(bytes);

        CQLLexer lexer = new CQLLexer(cql, true);
        while ((token = lexer.nextToken()) != TT_EOF) {
            // Nothing to do: debug() statements render tokens for us