Fossil

Check-in [a1064042]

Overview
Comment: Add a new test script that runs 10,000 web pages without valgrind, looking for performance issues or fatal errors. Fix one incorrect SQL statement found by this script. Update the valgrind test script to run the discovered web pages in a random order.
SHA1: a1064042722ba2a2d41ec103cd18284e9c5a9ba1
User & Date: drh 2012-11-08 16:09:45
Context
2012-11-08
16:25 Short-circuit a rare pathological case in the diff generator to prevent it from taking too much time. check-in: 353438a8 user: drh tags: trunk
16:09 Add a new test script that runs 10,000 web pages without valgrind, looking for performance issues or fatal errors. Fix one incorrect SQL statement found by this script. Update the valgrind test script to run the discovered web pages in a random order. check-in: a1064042 user: drh tags: trunk
15:45 accidently committed two unrelated files... check-in: b1199eb9 user: jan.nijtmans tags: trunk
Changes

Changes to src/attach.c.

@@ -523,15 +523,15 @@
      @ <pre>
      @ %h(z)
      @ </pre>
    }
  }else if( strncmp(zMime, "image/", 6)==0 ){
    @ <img src="%R/raw?name=%s(zSrc)&m=%s(zMime)"></img>
  }else{
-    int sz = db_int(0, "SELECT sz FROM blob WHERE rid=%d", ridSrc);
+    int sz = db_int(0, "SELECT size FROM blob WHERE rid=%d", ridSrc);
    @ <i>(file is %d(sz) bytes of binary data)</i>
  }
  @ </blockquote>
  manifest_destroy(pAttach);
  blob_reset(&attach);
  style_footer();
}
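
The one-line fix above corrects the column name used against Fossil's blob table, whose byte count is stored in a column named "size"; the old "SELECT sz FROM blob" referenced a column that does not exist in that table, which is exactly the kind of fatal error the new test script is meant to surface. As a quick way to double-check the schema, here is a minimal stand-alone sketch (not part of this check-in) using the sqlite3 Tcl package; the repository path ./repo.fossil is an assumption and should be adjusted for your own repository:

#!/usr/bin/tclsh
# Stand-alone sketch (not part of the check-in): list the columns of the
# blob table to confirm that the byte count is stored as "size", not "sz".
# The repository path below is an assumption; point it at a real repository.
package require sqlite3
sqlite3 db ./repo.fossil
db eval {PRAGMA table_info(blob)} col {
  puts $col(name)    ;# the listing should include "size" and no "sz"
}
db close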

Added test/many-www.tcl.

#!/usr/bin/tclsh
#
# Run this script from within any open Fossil checkout.  Example:
#
#   tclsh many-www.tcl | tee out.txt
#
# About 10,000 different web page requests will be made.  Each request is
# timed and the elapsed time is shown in the output.  Use this script to
# search for segfault problems or to look for pages that need optimization.
#
proc run_query {url} {
  set fd [open q.txt w]
  puts $fd "GET $url HTTP/1.0\r\n\r"
  close $fd
  return [exec fossil test-http <q.txt]
}
set todo {}
foreach url {
  /home
  /timeline
  /brlist
  /taglist
  /reportlist
  /setup
  /dir
  /wcontent
} {
  set seen($url) 1
  set pending($url) 1
}
set limit 10000
set npending [llength [array names pending]]
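# Return one URL drawn at random from the pending() array, removing it so
# that every discovered page is visited at most once.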
proc get_pending {} {
  global pending npending
  set res [lindex [array names pending] [expr {int(rand()*$npending)}]]
  unset pending($res)
  incr npending -1
  return $res
}
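# Crawl loop: visit pending pages in random order, time each request, and
# harvest new links from the returned HTML until the limit is reached.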
for {set i 0} {$npending>0 && $i<$limit} {incr i} {
  set url [get_pending]
  puts -nonewline "([expr {$i+1}]) $url "
  flush stdout
  set tm [time {set x [run_query $url]}]
  set ms [lindex $tm 0]
  puts [format {%.3fs} [expr {$ms/1000000.0}]]
  flush stdout
  if {[string length $x]>1000000} {
    set x [string range $x 0 1000000]
  }
  while {[regexp {<[aA] .*?href="(/[a-z].*?)".*?>(.*)$} $x all url tail]} {
    # if {$npending>2*($limit - $i)} break
    set u2 [string map {&lt; < &gt; > &quot; \" &amp; &} $url]
    if {![info exists seen($u2)]} {
      set pending($u2) 1
      set seen($u2) 1
      incr npending
    }
    set x $tail
  }
}
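
The script prints one line per request of the form "(N) /url 0.123s", which makes the output easy to post-process. The following is a minimal sketch (not part of this check-in) that reads the out.txt produced by the invocation shown in the header comment and prints the ten slowest pages; the file name out.txt and the "top ten" cutoff are assumptions:

#!/usr/bin/tclsh
# Sketch (not part of the check-in): rank the pages timed by many-www.tcl.
# Assumes "tclsh many-www.tcl | tee out.txt" was run first, so each line of
# out.txt looks like "(N) /url 0.123s".
set fd [open out.txt r]
set rows {}
while {[gets $fd line] >= 0} {
  if {[regexp {^\((\d+)\) (\S+) ([0-9.]+)s$} $line all n url secs]} {
    lappend rows [list $secs $url]
  }
}
close $fd
# Sort by elapsed time, slowest first, and show the top ten.
foreach row [lrange [lsort -real -decreasing -index 0 $rows] 0 9] {
  puts [format {%8.3fs  %s} [lindex $row 0] [lindex $row 1]]
}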

Changes to test/valgrind-www.tcl.

@@ -21,24 +21,35 @@
  /home
  /timeline
  /brlist
  /taglist
  /reportlist
  /setup
  /dir
+  /wcontent
} {
  set seen($url) 1
-  lappend todo $url
+  set pending($url) 1
}
-for {set i 0} {$i<[llength $todo] && $i<1000} {incr i} {
-  set url [lindex $todo $i]
+set limit 1000
+set npending [llength [array names pending]]
+proc get_pending {} {
+  global pending npending
+  set res [lindex [array names pending] [expr {int(rand()*$npending)}]]
+  unset pending($res)
+  incr npending -1
+  return $res
+}
+for {set i 0} {$npending>0 && $i<$limit} {incr i} {
+  set url [get_pending]
  puts "====== ([expr {$i+1}]) $url ======"
  set x [run_query $url]
  while {[regexp {<[aA] .*?href="(/[a-z].*?)".*?>(.*)$} $x all url tail]} {
    set u2 [string map {&lt; < &gt; > &quot; \" &amp; &} $url]
    if {![info exists seen($u2)]} {
-      lappend todo $u2
+      set pending($u2) 1
      set seen($u2) 1
+      incr npending
    }
    set x $tail
  }
}
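
With this change test/valgrind-www.tcl uses the same randomized pending/seen crawl as the new many-www.tcl, capped at 1,000 pages instead of 10,000. The sketch below (not part of the check-in) isolates the draw-without-replacement pattern behind get_pending: every queued URL is returned exactly once, in an order that differs from run to run, which is what lets each crawl terminate while still exercising the pages in an unpredictable sequence.

#!/usr/bin/tclsh
# Stand-alone illustration (not part of the check-in) of the random
# draw-without-replacement pattern used by get_pending in both scripts.
array set pending {/home 1 /timeline 1 /brlist 1 /taglist 1}
set npending [llength [array names pending]]
while {$npending > 0} {
  # Pick a random element, then remove it so it cannot be drawn again.
  set pick [lindex [array names pending] [expr {int(rand()*$npending)}]]
  unset pending($pick)
  incr npending -1
  puts $pick
}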