Version 9 of TcLeo

Updated 2002-04-08 15:47:09

by Reinhard Max

This little script sends its command-line arguments as a query to the online dictionary at http://dict.leo.org and writes the parsed result to stdout. It uses Tcl's http package and the htmlparse and ncgi packages from Tcllib.
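
For example, assuming the script has been saved as tcleo.tcl (the file name is just for illustration), a lookup can be started from the shell like this:

 tclsh tcleo.tcl firewall

The translations found are printed as a two-column table with an English and a German column.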


 package require http
 package require htmlparse
 package require ncgi
 namespace eval ::dict.leo.org {
    variable table ""
    variable TR ""
    variable TD ""
    # Callback for ::htmlparse::parse: collects the text of each table
    # cell into TD, finished cells into TR, and finished rows into table.
    proc parse {tag close options body} {
        variable TR
        variable TD
        variable table
        switch -- $close$tag {
            TR     {set TR ""}
            TD     {set TD ""}
            /TR    {if {[llength $TR]} {lappend table $TR}}
            /TD    {if {[llength $TD]} {lappend TR [join $TD]}}
            default {append TD [string map {&nbsp; { }} $body]}
        }
    }
    # Sends $query to dict.leo.org and returns the parsed result as a
    # list of rows, each row being a list of cell texts.
    proc query {query} {
        variable table
        set url "http://dict.leo.org/?search=[::ncgi::encode $query]"
        set tok [::http::geturl $url]
        # Only the line containing "search results" holds the table we want.
        foreach line [split [::http::data $tok] "\n"] {
            if {[string match "*search results*" $line]} break
        }
        ::http::cleanup $tok
        set table ""
        ::htmlparse::parse -cmd ::dict.leo.org::parse $line
        return $table
    }
 }
 # Return the larger of two numbers.
 proc max {a b} {expr {$a > $b ? $a : $b}}
 proc main {argv} {
    set table [dict.leo.org::query [join $argv]]
    # Determine the width of the widest cell for column formatting.
    set max 0
    foreach row $table {
        foreach c $row {set max [max $max [string length $c]]}
    }
    incr max
    # Prepend a header row and a line of = signs as separator.
    set sep [string repeat = $max]
    set table [linsert $table 0 {English German} [list $sep $sep]]
    foreach row $table {
        foreach {c1 c2} $row break
        puts [format "%-*s %-*s" $max $c1 $max $c2]
    }
    puts ""
 }
 main $argv
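
Note that ::http::geturl as used above blocks for as long as the server keeps it waiting. A minimal sketch of a more defensive version of the geturl line in query, with the rest of the proc unchanged (the 10-second timeout and the error message are arbitrary choices):

 # sketch: replaces "set tok [::http::geturl $url]" in query
 set tok [::http::geturl $url -timeout 10000]
 set status [::http::status $tok]
 if {$status ne "ok" || [::http::ncode $tok] != 200} {
     ::http::cleanup $tok
     error "query to dict.leo.org failed: $status"
 }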

RS: Proud owners of a firewall might have to add a line like

    http::config -proxyhost proxy -proxyport 80

at the top of the query proc. In my case that was what it took to actually get out through the firewall.
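
A small sketch of picking the proxy settings up from the common http_proxy environment variable instead of hard-coding them (the variable name and its expected format are assumptions about the local setup):

 # hypothetical, e.g. http_proxy=http://proxy.example.com:8080
 if {[info exists ::env(http_proxy)]} {
     if {[regexp {^http://([^:/]+):(\d+)} $::env(http_proxy) -> host port]} {
         ::http::config -proxyhost $host -proxyport $port
     }
 }

Tcllib also ships an autoproxy package whose autoproxy::init does essentially this from the environment.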


Web scraping | Using Tcl to write WWW client side applications