SQLite

Check-in [e0eb217aca]

Overview
Comment: Disable the two-argument form of the fts3_tokenizer() SQL function unless the library is built with -DSQLITE_ENABLE_FTS3_TOKENIZER.
SHA1: e0eb217aca7e4aadf9c44ed20822b78139f7c83c
User & Date: dan 2016-02-08 19:40:17.991
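
The practical effect of this change, sketched below: in a build compiled without SQLITE_ENABLE_FTS3_TOKENIZER, the two-argument (registration) form of fts3_tokenizer() now fails with an error, while the one-argument (query) form still works. This is a minimal illustrative program, not part of the check-in; it assumes an SQLite build with FTS3 enabled (-DSQLITE_ENABLE_FTS3).

#include <sqlite3.h>
#include <stdio.h>

int main(void){
  sqlite3 *db;
  char *zErr = 0;
  sqlite3_open(":memory:", &db);

  /* Two-argument (registration) form. After this check-in, a build
  ** without -DSQLITE_ENABLE_FTS3_TOKENIZER rejects this with:
  ** "fts3tokenize: disabled - rebuild with -DSQLITE_ENABLE_FTS3_TOKENIZER" */
  if( sqlite3_exec(db,
        "SELECT fts3_tokenizer('blah', fts3_tokenizer('simple'))",
        0, 0, &zErr)!=SQLITE_OK ){
    printf("error: %s\n", zErr);
    sqlite3_free(zErr);
  }

  /* One-argument (query) form is unaffected. */
  sqlite3_exec(db, "SELECT fts3_tokenizer('simple')", 0, 0, 0);

  sqlite3_close(db);
  return 0;
}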
Context
2016-02-08 20:06  Fix a compiler warning when compiling without SQLITE_ENABLE_FTS3_TOKENIZER. (check-in: 6926f28cd5 user: drh tags: trunk)
2016-02-08 19:40  Disable the two-argument form of the fts3_tokenizer() SQL function unless the library is built with -DSQLITE_ENABLE_FTS3_TOKENIZER. (check-in: e0eb217aca user: dan tags: trunk)
2016-02-08 19:36  Changes to help the tokenizer run about 33% faster. (check-in: a050e6f096 user: drh tags: trunk)
Changes
Changes to ext/fts3/fts3_tokenizer.c.

@@ -63,27 +63,35 @@
   pHash = (Fts3Hash *)sqlite3_user_data(context);
 
   zName = sqlite3_value_text(argv[0]);
   nName = sqlite3_value_bytes(argv[0])+1;
 
   if( argc==2 ){
+#ifdef SQLITE_ENABLE_FTS3_TOKENIZER
     void *pOld;
     int n = sqlite3_value_bytes(argv[1]);
     if( zName==0 || n!=sizeof(pPtr) ){
       sqlite3_result_error(context, "argument type mismatch", -1);
       return;
     }
     pPtr = *(void **)sqlite3_value_blob(argv[1]);
     pOld = sqlite3Fts3HashInsert(pHash, (void *)zName, nName, pPtr);
     if( pOld==pPtr ){
       sqlite3_result_error(context, "out of memory", -1);
       return;
     }
-  }else{
+#else
+    sqlite3_result_error(context, "fts3tokenize: "
+        "disabled - rebuild with -DSQLITE_ENABLE_FTS3_TOKENIZER", -1
+    );
+    return;
+#endif /* SQLITE_ENABLE_FTS3_TOKENIZER */
+  }else
+  {
     if( zName ){
       pPtr = sqlite3Fts3HashFind(pHash, zName, nName);
     }
     if( !pPtr ){
       char *zErr = sqlite3_mprintf("unknown tokenizer: %s", zName);
       sqlite3_result_error(context, zErr, -1);
       sqlite3_free(zErr);

@@ -416,19 +424,21 @@
   assert( p1==p2 );
   rc = queryTokenizer(db, "nosuchtokenizer", &p2);
   assert( rc==SQLITE_ERROR );
   assert( p2==0 );
   assert( 0==strcmp(sqlite3_errmsg(db), "unknown tokenizer: nosuchtokenizer") );
 
   /* Test the storage function */
+#ifdef SQLITE_ENABLE_FTS3_TOKENIZER
   rc = registerTokenizer(db, "nosuchtokenizer", p1);
   assert( rc==SQLITE_OK );
   rc = queryTokenizer(db, "nosuchtokenizer", &p2);
   assert( rc==SQLITE_OK );
   assert( p2==p1 );
+#endif
 
   sqlite3_result_text(context, "ok", -1, SQLITE_STATIC);
 }
 
 #endif
 
 /*
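
The pattern the now-gated two-argument form supports is worth seeing in one place: a tokenizer implementation is registered by passing its address through SQL as a blob of exactly sizeof(pointer) bytes, which is what the n!=sizeof(pPtr) check above guards. The helper below is an illustrative sketch of that pattern, with the same shape as the registerTokenizer() helper exercised by the internal test code above; it is not part of this check-in.

#include <sqlite3.h>

/* Illustrative sketch: register an FTS3 tokenizer via the two-argument
** form of fts3_tokenizer(). With this check-in, the call succeeds only
** in builds compiled with -DSQLITE_ENABLE_FTS3_TOKENIZER. */
static int registerTokenizer(sqlite3 *db, const char *zName, const void *p){
  int rc;
  sqlite3_stmt *pStmt;
  const char zSql[] = "SELECT fts3_tokenizer(?1, ?2)";

  rc = sqlite3_prepare_v2(db, zSql, -1, &pStmt, 0);
  if( rc!=SQLITE_OK ) return rc;

  sqlite3_bind_text(pStmt, 1, zName, -1, SQLITE_STATIC);
  /* The blob payload is the pointer value itself: sizeof(p) bytes. */
  sqlite3_bind_blob(pStmt, 2, &p, sizeof(p), SQLITE_STATIC);
  sqlite3_step(pStmt);
  return sqlite3_finalize(pStmt);
}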
Changes to src/test_config.c.

@@ -365,14 +365,20 @@
 #endif
 
 #ifdef SQLITE_ENABLE_FTS3
   Tcl_SetVar2(interp, "sqlite_options", "fts3", "1", TCL_GLOBAL_ONLY);
 #else
   Tcl_SetVar2(interp, "sqlite_options", "fts3", "0", TCL_GLOBAL_ONLY);
 #endif
 
+#ifdef SQLITE_ENABLE_FTS3_TOKENIZER
+  Tcl_SetVar2(interp, "sqlite_options", "fts3_tokenizer", "1", TCL_GLOBAL_ONLY);
+#else
+  Tcl_SetVar2(interp, "sqlite_options", "fts3_tokenizer", "0", TCL_GLOBAL_ONLY);
+#endif
+
 #ifdef SQLITE_ENABLE_FTS5
   Tcl_SetVar2(interp, "sqlite_options", "fts5", "1", TCL_GLOBAL_ONLY);
 #else
   Tcl_SetVar2(interp, "sqlite_options", "fts5", "0", TCL_GLOBAL_ONLY);
 #endif
 
Changes to test/fts3atoken.test.

@@ -20,198 +20,206 @@
 # If SQLITE_ENABLE_FTS3 is defined, omit this file.
 ifcapable !fts3 {
   finish_test
   return
 }
 
-set ::testprefix fts3token
+set ::testprefix fts3atoken
 
 proc escape_string {str} {
   set out ""
   foreach char [split $str ""] {
     scan $char %c i
     if {$i<=127} {
       append out $char
     } else {
       append out [format {\x%.4x} $i]
     }
   }
   set out
 }
 
 #--------------------------------------------------------------------------
-# Test cases fts3token-1.* are the warm-body test for the SQL scalar
+# Test cases fts3atoken-1.* are the warm-body test for the SQL scalar
 # function fts3_tokenizer(). The procedure is as follows:
 #
 #   1: Verify that there is no such fts3 tokenizer as 'blah'.
 #
 #   2: Query for the built-in tokenizer 'simple'. Insert a copy of the
 #      retrieved value as tokenizer 'blah'.
 #
 #   3: Test that the value returned for tokenizer 'blah' is now the
 #      same as that retrieved for 'simple'.
 #
 #   4: Test that it is now possible to create an fts3 table using 
 #      tokenizer 'blah' (it was not possible in step 1).
 #
 #   5: Test that the table created to use tokenizer 'blah' is usable.
 #
-do_test fts3token-1.1 {
-  catchsql {
-    CREATE VIRTUAL TABLE t1 USING fts3(content, tokenize blah);
-  }
-} {1 {unknown tokenizer: blah}}
-do_test fts3token-1.2 {
-  execsql {
-    SELECT fts3_tokenizer('blah', fts3_tokenizer('simple')) IS NULL;
-  }
-} {0}
-do_test fts3token-1.3 {
-  execsql {
-    SELECT fts3_tokenizer('blah') == fts3_tokenizer('simple');
-  }
-} {1}
-do_test fts3token-1.4 {
-  catchsql {
-    CREATE VIRTUAL TABLE t1 USING fts3(content, tokenize blah);
-  }
-} {0 {}}
-do_test fts3token-1.5 {
-  execsql {
-    INSERT INTO t1(content) VALUES('There was movement at the station');
-    INSERT INTO t1(content) VALUES('For the word has passed around');
-    INSERT INTO t1(content) VALUES('That the colt from ol regret had got away');
-    SELECT content FROM t1 WHERE content MATCH 'movement'
-  }
-} {{There was movement at the station}}
+ifcapable fts3_tokenizer {
+  do_test fts3atoken-1.1 {
+    catchsql {
+      CREATE VIRTUAL TABLE t1 USING fts3(content, tokenize blah);
+    }
+  } {1 {unknown tokenizer: blah}}
+  do_test fts3atoken-1.2 {
+    execsql {
+      SELECT fts3_tokenizer('blah', fts3_tokenizer('simple')) IS NULL;
+    }
+  } {0}
+  do_test fts3atoken-1.3 {
+    execsql {
+      SELECT fts3_tokenizer('blah') == fts3_tokenizer('simple');
+    }
+  } {1}
+  do_test fts3atoken-1.4 {
+    catchsql {
+      CREATE VIRTUAL TABLE t1 USING fts3(content, tokenize blah);
+    }
+  } {0 {}}
+  do_test fts3atoken-1.5 {
+    execsql {
+      INSERT INTO t1(content) VALUES('There was movement at the station');
+      INSERT INTO t1(content) VALUES('For the word has passed around');
+      INSERT INTO t1(content) VALUES('That the colt from ol regret had got');
+      SELECT content FROM t1 WHERE content MATCH 'movement'
+    }
+  } {{There was movement at the station}}
+} else {
+  do_catchsql_test 1.6 {
+    SELECT fts3_tokenizer('blah', fts3_tokenizer('simple')) IS NULL;
+  } {1 {fts3tokenize: disabled - rebuild with -DSQLITE_ENABLE_FTS3_TOKENIZER}}
+}
 
 #--------------------------------------------------------------------------
-# Test cases fts3token-2.* test error cases in the scalar function based
+# Test cases fts3atoken-2.* test error cases in the scalar function based
 # API for getting and setting tokenizers.
 #
-do_test fts3token-2.1 {
+do_test fts3atoken-2.1 {
   catchsql {
     SELECT fts3_tokenizer('nosuchtokenizer');
   }
 } {1 {unknown tokenizer: nosuchtokenizer}}
 
 #--------------------------------------------------------------------------
-# Test cases fts3token-3.* test the three built-in tokenizers with a
+# Test cases fts3atoken-3.* test the three built-in tokenizers with a
 # simple input string via the built-in test function. This is as much
 # to test the test function as the tokenizer implementations.
 #
-do_test fts3token-3.1 {
+do_test fts3atoken-3.1 {
   execsql {
     SELECT fts3_tokenizer_test('simple', 'I don''t see how');
   }
 } {{0 i I 1 don don 2 t t 3 see see 4 how how}}
-do_test fts3token-3.2 {
+do_test fts3atoken-3.2 {
   execsql {
     SELECT fts3_tokenizer_test('porter', 'I don''t see how');
   }
 } {{0 i I 1 don don 2 t t 3 see see 4 how how}}
 ifcapable icu {
-  do_test fts3token-3.3 {
+  do_test fts3atoken-3.3 {
     execsql {
       SELECT fts3_tokenizer_test('icu', 'I don''t see how');
     }
   } {{0 i I 1 don't don't 2 see see 3 how how}}
 }
 
 #--------------------------------------------------------------------------
-# Test cases fts3token-4.* test the ICU tokenizer. In practice, this
+# Test cases fts3atoken-4.* test the ICU tokenizer. In practice, this
 # tokenizer only has two modes - "thai" and "everybody else". Some other
 # Asian languages (Lao, Khmer etc.) require the same special treatment as 
 # Thai, but ICU doesn't support them yet.
 #
 ifcapable icu {
 
   proc do_icu_test {name locale input output} {
     set ::out [db eval { SELECT fts3_tokenizer_test('icu', $locale, $input) }]
     do_test $name {
       lindex $::out 0
     } $output
   }
   
-  do_icu_test fts3token-4.1 en_US  {}   {}
-  do_icu_test fts3token-4.2 en_US {Test cases fts3} [list \
+  do_icu_test fts3atoken-4.1 en_US  {}   {}
+  do_icu_test fts3atoken-4.2 en_US {Test cases fts3} [list \
     0 test Test 1 cases cases 2 fts3 fts3
   ]
 
   # The following test shows that ICU is smart enough to recognise
   # Thai chararacters, even when the locale is set to English/United 
   # States.
   #
   set input "\u0e2d\u0e30\u0e44\u0e23\u0e19\u0e30\u0e04\u0e23\u0e31\u0e1a"
   set output    "0 \u0e2d\u0e30\u0e44\u0e23 \u0e2d\u0e30\u0e44\u0e23 "
   append output "1 \u0e19\u0e30 \u0e19\u0e30 "
   append output "2 \u0e04\u0e23\u0e31\u0e1a \u0e04\u0e23\u0e31\u0e1a"
 
-  do_icu_test fts3token-4.3 th_TH  $input $output
-  do_icu_test fts3token-4.4 en_US  $input $output
+  do_icu_test fts3atoken-4.3 th_TH  $input $output
+  do_icu_test fts3atoken-4.4 en_US  $input $output
 
   # ICU handles an unknown locale by falling back to the default.
   # So this is not an error.
-  do_icu_test fts3token-4.5 MiddleOfTheOcean  $input $output
+  do_icu_test fts3atoken-4.5 MiddleOfTheOcean  $input $output
 
   set    longtoken "AReallyReallyLongTokenOneThatWillSurelyRequire"
   append longtoken "AReallocInTheIcuTokenizerCode"
 
   set    input "short tokens then "
   append input $longtoken
   set    output "0 short short "
   append output "1 tokens tokens "
   append output "2 then then "
   append output "3 [string tolower $longtoken] $longtoken"
 
-  do_icu_test fts3token-4.6 MiddleOfTheOcean  $input $output
-  do_icu_test fts3token-4.7 th_TH  $input $output
-  do_icu_test fts3token-4.8 en_US  $input $output
+  do_icu_test fts3atoken-4.6 MiddleOfTheOcean  $input $output
+  do_icu_test fts3atoken-4.7 th_TH  $input $output
+  do_icu_test fts3atoken-4.8 en_US  $input $output
 
   do_execsql_test 5.1 {
     CREATE VIRTUAL TABLE x1 USING fts3(name,TOKENIZE icu en_US);
     insert into x1 (name) values (NULL);
     insert into x1 (name) values (NULL);
     delete from x1;
   }
 
   proc cp_to_str {codepoint_list} {
     set fmt [string repeat %c [llength $codepoint_list]]
     eval [list format $fmt] $codepoint_list
   }
 
   do_test 5.2 {
     set str [cp_to_str {19968 26085 32822 32645 27874 23433 20986}]
     execsql { INSERT INTO x1 VALUES($str) }
   } {}
 }
 
-do_test fts3token-internal {
+do_test fts3atoken-internal {
   execsql { SELECT fts3_tokenizer_internal_test() }
 } {ok}
 
 #-------------------------------------------------------------------------
 # Test empty tokenizer names.
 #
 do_catchsql_test 6.1.1 {
   CREATE VIRTUAL TABLE t3 USING fts4(tokenize="");
 } {1 {unknown tokenizer: }}
 do_catchsql_test 6.1.2 {
   CREATE VIRTUAL TABLE t3 USING fts4(tokenize=);
 } {1 {unknown tokenizer: }}
 do_catchsql_test 6.1.3 {
   CREATE VIRTUAL TABLE t3 USING fts4(tokenize="   ");
 } {1 {unknown tokenizer:    }}
 
 do_catchsql_test 6.2.1 {
   SELECT fts3_tokenizer(NULL);
 } {1 {unknown tokenizer: }}
-do_catchsql_test 6.2.2 {
-  SELECT fts3_tokenizer(NULL, X'1234567812345678');
-} {1 {argument type mismatch}}
-do_catchsql_test 6.2.3 {
-  SELECT fts3_tokenizer(NULL, X'12345678');
-} {1 {argument type mismatch}}
+ifcapable fts3_tokenizer {
+  do_catchsql_test 6.2.2 {
+    SELECT fts3_tokenizer(NULL, X'1234567812345678');
+  } {1 {argument type mismatch}}
+  do_catchsql_test 6.2.3 {
+    SELECT fts3_tokenizer(NULL, X'12345678');
+  } {1 {argument type mismatch}}
+}
 
 
 finish_test
Changes to test/fts4langid.test.

@@ -354,37 +354,39 @@
   for {set i 0} {$i < 50} {incr i} {
     execsql { 
       INSERT INTO t4(docid, content, lid) VALUES($i, 'The Quick Brown Fox', $i) 
     }
   }
 }
 
-do_test 4.1.0 {
-  reset_db
-  set ptr [fts3_test_tokenizer]
-  execsql { SELECT fts3_tokenizer('testtokenizer', $ptr) }
-  build_multilingual_db_2 db
-} {}
-do_execsql_test 4.1.1 {
-  SELECT docid FROM t4 WHERE t4 MATCH 'quick';
-} {0}
-do_execsql_test 4.1.2 {
-  SELECT docid FROM t4 WHERE t4 MATCH 'quick' AND lid=1;
-} {}
-do_execsql_test 4.1.3 {
-  SELECT docid FROM t4 WHERE t4 MATCH 'Quick' AND lid=1;
-} {1}
-for {set i 0} {$i < 50} {incr i} {
-  do_execsql_test 4.1.4.$i {
-    SELECT count(*) FROM t4 WHERE t4 MATCH 'fox' AND lid=$i;
-  } [expr 0==($i%2)]
-}
-do_catchsql_test 4.1.5 {
-  INSERT INTO t4(content, lid) VALUES('hello world', 101)
-} {1 {SQL logic error or missing database}}
+ifcapable fts3_tokenizer {
+  do_test 4.1.0 {
+    reset_db
+    set ptr [fts3_test_tokenizer]
+    execsql { SELECT fts3_tokenizer('testtokenizer', $ptr) }
+    build_multilingual_db_2 db
+  } {}
+  do_execsql_test 4.1.1 {
+    SELECT docid FROM t4 WHERE t4 MATCH 'quick';
+  } {0}
+  do_execsql_test 4.1.2 {
+    SELECT docid FROM t4 WHERE t4 MATCH 'quick' AND lid=1;
+  } {}
+  do_execsql_test 4.1.3 {
+    SELECT docid FROM t4 WHERE t4 MATCH 'Quick' AND lid=1;
+  } {1}
+  for {set i 0} {$i < 50} {incr i} {
+    do_execsql_test 4.1.4.$i {
+      SELECT count(*) FROM t4 WHERE t4 MATCH 'fox' AND lid=$i;
+    } [expr 0==($i%2)]
+  }
+  do_catchsql_test 4.1.5 {
+    INSERT INTO t4(content, lid) VALUES('hello world', 101)
+  } {1 {SQL logic error or missing database}}
+}
 
 #-------------------------------------------------------------------------
 # Test cases 5.*
 #
 # The following test cases are designed to detect a 32-bit overflow bug
 # that existed at one point.
 #