Check-in [b1ec5478]

Overview
Comment: Make sure the host machine has enough disk space before running the large file tests. (CVS 800)
SHA1: b1ec547865a76541951433630bd4528d1988a4de
User & Date: drh 2002-12-17 14:13:49
Context
2002-12-17
14:19  If the "fake_big_file" command fails, be sure to close the file before returning. (CVS 801) check-in: fa1b5e21 user: drh tags: trunk
14:13  Make sure the host machine has enough disk space before running the large file tests. (CVS 800) check-in: b1ec5478 user: drh tags: trunk
13:05  Take care not to modify the sqlite* pointer to sqlite_exec() if we suspect that the pointer is stale - that it has previously been passed to sqlite_close(). Possible fix for ticket #202. Prior to this fix, test misuse-5.3 was causing a change to a buffer that had been previously free()-ed. (CVS 799) check-in: f04547ed user: drh tags: trunk
Changes

Changes to test/bigfile.test.

@@ -8,15 +8,15 @@
 #    May you share freely, never taking more than you give.
 #
 #***********************************************************************
 # This file implements regression tests for SQLite library.  The
 # focus of this script testing the ability of SQLite to handle database
 # files larger than 4GB.
 #
-# $Id: bigfile.test,v 1.1 2002/12/01 02:00:58 drh Exp $
+# $Id: bigfile.test,v 1.2 2002/12/17 14:13:49 drh Exp $
 #
 
 set testdir [file dirname $argv0]
 source $testdir/tester.tcl
 
 # This is the md5 checksum of all the data in table t1 as created
 # by the first test.  We will use this number to make sure that data
@@ -38,18 +38,27 @@
     INSERT INTO t1 SELECT rowid || ' ' || x FROM t1;
     COMMIT;
   }
   execsql {
     SELECT md5sum(x) FROM t1;
   }
 } $::MAGIC_SUM
 
-do_test bigfile-1.2 {
-  db close
-  fake_big_file 4096 test.db
+# Try to create a large file - a file that is larger than 2^32 bytes.
+# If this fails, it means that the system being tested does not support
+# large files.  So skip all of the remaining tests in this file.
+#
+db close
+if {[catch {fake_big_file 4096 test.db}]} {
+  puts "**** Unable to create a file larger than 4096 MB. *****"
+  finish_test
+  return
+}
+
+do_test bigfile-1.2 {
   sqlite db test.db
   execsql {
     SELECT md5sum(x) FROM t1;
   }
 } $::MAGIC_SUM
 
 # The previous test may fail on some systems because they are unable
@@ -73,17 +82,23 @@
   }
 } $::MAGIC_SUM
 do_test bigfile-1.5 {
   execsql {
     SELECT md5sum(x) FROM t2;
   }
 } $::MAGIC_SUM
-do_test bigfile-1.6 {
-  db close
-  fake_big_file 8192 test.db
+
+db close
+if {[catch {fake_big_file 8192 test.db}]} {
+  puts "**** Unable to create a file larger than 8192 MB. *****"
+  finish_test
+  return
+}
+
+do_test bigfile-1.6 {
   sqlite db test.db
   execsql {
     SELECT md5sum(x) FROM t1;
   }
 } $::MAGIC_SUM
 do_test bigfile-1.7 {
   execsql {
@@ -104,17 +119,23 @@
   }
 } $::MAGIC_SUM
 do_test bigfile-1.10 {
   execsql {
     SELECT md5sum(x) FROM t3;
   }
 } $::MAGIC_SUM
-do_test bigfile-1.11 {
-  db close
-  fake_big_file 16384 test.db
+
+db close
+if {[catch {fake_big_file 16384 test.db}]} {
+  puts "**** Unable to create a file larger than 16384 MB. *****"
+  finish_test
+  return
+}
+
+do_test bigfile-1.11 {
  sqlite db test.db
   execsql {
     SELECT md5sum(x) FROM t1;
   }
 } $::MAGIC_SUM
 do_test bigfile-1.12 {
   execsql {
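
The substance of this check-in is the guard that now precedes each group of big-file tests: instead of calling fake_big_file inside a do_test block and failing outright when the host cannot hold a multi-gigabyte file, the script wraps the call in catch at the top level and skips the rest of the file. A minimal sketch of that pattern, assuming (as the script implies) that fake_big_file grows the named file to the given number of megabytes and that finish_test comes from tester.tcl; the err variable is added here only to show where catch would put the error message:

  # Close the Tcl-level database handle so the file is not held open
  # while it is being extended.
  db close

  # catch returns non-zero if fake_big_file raises an error, e.g. when
  # the filesystem cannot seek past 2^32 bytes; err receives the message.
  if {[catch {fake_big_file 4096 test.db} err]} {
    puts "**** Unable to create a file larger than 4096 MB. *****"
    finish_test   ;# report the tests that did run
    return        ;# skip the remaining tests in this file
  }

  # Reopen the database and continue with the checksum tests.
  sqlite db test.db

Skipping via finish_test and return, rather than letting the tests error out, keeps the suite usable on hosts whose filesystem cannot address files past 2^32 bytes, which is exactly the case the new comment block in the file describes.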