Index: openacs-4/packages/wiki/wiki.info =================================================================== RCS file: /usr/local/cvsroot/openacs-4/packages/wiki/wiki.info,v diff -u -N --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ openacs-4/packages/wiki/wiki.info 4 Jan 2005 18:01:18 -0000 1.1 @@ -0,0 +1,26 @@ + + + + + Wiki + Wikis + f + f + + + Dave Bauer + Wiki + Wiki implementation on the CR + 0 + + + + + + + + + + + + Index: openacs-4/packages/wiki/lib/footer.adp =================================================================== RCS file: /usr/local/cvsroot/openacs-4/packages/wiki/lib/footer.adp,v diff -u -N --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ openacs-4/packages/wiki/lib/footer.adp 4 Jan 2005 18:01:18 -0000 1.1 @@ -0,0 +1,4 @@ +
+
+ Edit +
\ No newline at end of file Index: openacs-4/packages/wiki/lib/footer.tcl =================================================================== RCS file: /usr/local/cvsroot/openacs-4/packages/wiki/lib/footer.tcl,v diff -u -N --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ openacs-4/packages/wiki/lib/footer.tcl 4 Jan 2005 18:01:18 -0000 1.1 @@ -0,0 +1,4 @@ +# footer for wiki pages shows edit, recent changes, search etc... +# show last modified + + Index: openacs-4/packages/wiki/lib/new.adp =================================================================== RCS file: /usr/local/cvsroot/openacs-4/packages/wiki/lib/new.adp,v diff -u -N --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ openacs-4/packages/wiki/lib/new.adp 4 Jan 2005 18:01:18 -0000 1.1 @@ -0,0 +1,8 @@ + + @title@ + @header_stuff@ + @context@ + @focus@ + + + \ No newline at end of file Index: openacs-4/packages/wiki/lib/new.tcl =================================================================== RCS file: /usr/local/cvsroot/openacs-4/packages/wiki/lib/new.tcl,v diff -u -N --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ openacs-4/packages/wiki/lib/new.tcl 4 Jan 2005 18:01:18 -0000 1.1 @@ -0,0 +1,83 @@ +ad_page_contract { + create a new wiki page +} -query { + name + item_id:integer,optional +} + +set folder_id [wiki::get_folder_id] +set user_id [ad_conn user_id] +set ip_address [ad_conn peeraddr] + +permission::require_permission \ + -object_id $folder_id \ + -party_id $user_id \ + -privilege "create" + +# this is a wiki so we can always force the +# format and don't need richtext widget +set edit "" +ad_form -name new -action "new" -export {name edit} -form { + item_id:key + title:text + content:text(textarea) + revision_notes:text(textarea) + +} -edit_request { + + # content::item::get -item_id $item_id + db_1row get_item "select cr_items.item_id, title, content from cr_items, cr_revisions where name=:name and parent_id=:folder_id and latest_revision=revision_id" + +} -new_data { + + content::item::new \ + -name $name \ + -parent_id $folder_id \ + -creation_user $user_id \ + -creation_ip $ip_address \ + -title $title \ + -text $content \ + -description $revision_notes \ + -is_live "t" \ + -storage_type "text" \ + -mime_type "text/x-openacs-wiki" + + # do something clever with internal refs + set stream [Wikit::Format::TextToStream $content] + set refs [Wikit::Format::StreamToRefs $stream "wiki::info"] + +} -edit_data { + + content::revision::new \ + -item_id $item_id \ + -title $title \ + -content $content \ + -description $revision_notes + + db_dml set_live "update cr_items set live_revision=latest_revision where item_id=:item_id" + +} -after_submit { + # do something clever with internal refs + set stream [Wikit::Format::TextToStream $content] + set refs [Wikit::Format::StreamToRefs $stream "wiki::info"] + + db_foreach get_ids "select ci.item_id as ref_item_id from cr_items ci left join cr_item_rels cr on (cr.item_id=:item_id or cr.related_object_id=:item_id) where ci.parent_id=:folder_id and ci.name in ([template::util:::tcl_to_sql_list $refs]) and cr.rel_id is null" { + content::item::relate \ + -item_id $item_id \ + -object_id $ref_item_id \ + -relation_tag "wiki_reference" + } + + + ad_returnredirect "./$name" + +} + +set title "" +set context [list $title] +set header_stuff "" +set focus "" + +ad_return_template + + Index: openacs-4/packages/wiki/lib/page.adp =================================================================== RCS file: /usr/local/cvsroot/openacs-4/packages/wiki/lib/page.adp,v diff -u -N --- /dev/null 1 Jan 1970 00:00:00 
-0000 +++ openacs-4/packages/wiki/lib/page.adp 4 Jan 2005 18:01:18 -0000 1.1 @@ -0,0 +1,15 @@ + + @title@ + @header_stuff@ + @context@ + @focus@ + +@content;noquote@ + +

Pages that link to this page: + + @related_items.title@ + 

+
+ \ No newline at end of file Index: openacs-4/packages/wiki/lib/page.tcl =================================================================== RCS file: /usr/local/cvsroot/openacs-4/packages/wiki/lib/page.tcl,v diff -u -N --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ openacs-4/packages/wiki/lib/page.tcl 4 Jan 2005 18:01:18 -0000 1.1 @@ -0,0 +1,68 @@ +ad_page_contract { + +} -query { + edit:optional + revision_id:optional +} + +# +# +# Show or edit wiki pages +# +# @author Dave Bauer (dave@thedesignexperience.org) +# @creation-date 2004-09-03 +# @arch-tag: e5d58124-f276-4a01-a61a-e85959bbe0d1 +# @cvs-id $Id: page.tcl,v 1.1 2005/01/04 18:01:18 daveb Exp $ + +set folder_id [wiki::get_folder_id] +set name [ad_conn path_info] +if {$name eq ""} { + # the path resolves directly to a site node + set name "index" +} + +set item_id [content::item::get_id -item_path $name -resolve_index "t" -root_folder_id $folder_id] +if {[string equal "" $item_id]} { + rp_form_put name [ad_conn path_info] + rp_internal_redirect "/packages/wiki/lib/new" + ad_script_abort +} + +if {[info exists edit]} { + set form [rp_getform] + ns_log notice " +DB -------------------------------------------------------------------------------- +DB DAVE debugging /var/lib/aolserver/openacs-5-head-cr-tcl-api/packages/wiki/lib/page.tcl +DB -------------------------------------------------------------------------------- +DB form = '${form}' +DB [ns_set find $form "item_id"] +DB --------------------------------------------------------------------------------" + if {[ns_set find $form "item_id"] < 0} { + ns_log notice "Adding Item_id" + rp_form_put item_id $item_id + rp_form_put name $name + } + rp_internal_redirect "/packages/wiki/lib/new" +} + +content::item::get -item_id $item_id -attributes [list [list text ""] [list content ""]] +set revision_id $content_item(revision_id) +set content [db_string get_content "select content from cr_revisions where revision_id=:revision_id" -default ""] + +set stream [Wikit::Format::TextToStream $content] +set refs [Wikit::Format::StreamToRefs $stream "wiki::info"] +db_multirow related_items get_related_items "select cr.name, cr.title, cr.description from cr_revisionsx cr, cr_items ci, cr_item_rels cir where cir.related_object_id=:item_id and cir.relation_tag='wiki_reference' and ci.live_revision=cr.revision_id and ci.item_id=cir.item_id" + +set content [ad_wiki_text_to_html $content "wiki::info"] +set title $content_item(title) +set context [list $title] +set focus "" +set header_stuff "" + +set write_p [permission::permission_p \ + -object_id $item_id \ + -party_id [ad_conn user_id] \ + -privilege "write" + ] + +ad_return_template "page" Index: openacs-4/packages/wiki/sql/oracle/wiki-create.sql =================================================================== RCS file: /usr/local/cvsroot/openacs-4/packages/wiki/sql/oracle/wiki-create.sql,v diff -u -N --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ openacs-4/packages/wiki/sql/oracle/wiki-create.sql 4 Jan 2005 18:01:18 -0000 1.1 @@ -0,0 +1,11 @@ +-- +-- +-- +-- @author Dave Bauer (dave@thedesignexperience.org) +-- @creation-date 2004-09-06 +-- @arch-tag: 0d6b6723-0e95-4c00-8a84-cb79b4ad3f9d +-- @cvs-id $Id: wiki-create.sql,v 1.1 2005/01/04 18:01:18 daveb Exp $ +-- + +insert into cr_mime_types values ('Text - Wiki','text/x-openacs-wiki',''); +insert into cr_mime_types values ('Text - Markdown','text/x-openacs-markdown',''); \ No newline at end of file Index: openacs-4/packages/wiki/sql/postgresql/wiki-create.sql 
=================================================================== RCS file: /usr/local/cvsroot/openacs-4/packages/wiki/sql/postgresql/wiki-create.sql,v diff -u -N --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ openacs-4/packages/wiki/sql/postgresql/wiki-create.sql 4 Jan 2005 18:01:18 -0000 1.1 @@ -0,0 +1,11 @@ +-- +-- +-- +-- @author Dave Bauer (dave@thedesignexperience.org) +-- @creation-date 2004-09-06 +-- @arch-tag: 0d6b6723-0e95-4c00-8a84-cb79b4ad3f9d +-- @cvs-id $Id: wiki-create.sql,v 1.1 2005/01/04 18:01:18 daveb Exp $ +-- + +insert into cr_mime_types values ('Text - Wiki','text/x-openacs-wiki',''); +insert into cr_mime_types values ('Text - Markdown','text/x-openacs-markdown',''); \ No newline at end of file Index: openacs-4/packages/wiki/tcl/wiki-install-procs.tcl =================================================================== RCS file: /usr/local/cvsroot/openacs-4/packages/wiki/tcl/wiki-install-procs.tcl,v diff -u -N --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ openacs-4/packages/wiki/tcl/wiki-install-procs.tcl 4 Jan 2005 18:01:18 -0000 1.1 @@ -0,0 +1,72 @@ +# + +ad_library { + + Install callbacks for wiki package + + @author Dave Bauer (dave@thedesignexperience.org) + @creation-date 2004-09-06 + @arch-tag: 86a85f67-568e-422f-bd56-6c8fba89f1a2 + @cvs-id $Id: wiki-install-procs.tcl,v 1.1 2005/01/04 18:01:18 daveb Exp $ +} + +namespace eval wiki::install {} + +ad_proc -public wiki::install::package_install { +} { + Callback for package install + + @author Dave Bauer (dave@thedesignexperience.org) + @creation-date 2004-09-06 + + @return + + @error +} { + + content::type::register_relation_type \ + -content_type "content_revision" \ + -target_type "acs_object" \ + -relation_tag "wiki_referece" +} + +ad_proc -public wiki::install::after_instantiate { + -package_id + -node_id +} { + After instantiate callback for wiki package + + @author Dave Bauer (dave@thedesignexperience.org) + @creation-date 2004-09-06 + + @param package_id + + @param node_id + + @return + + @error +} { + # create new folder + set folder_id [content::folder::new \ + -name $package_id \ + -label "Wiki Folder" \ + -package_id $package_id \ + -context_id $package_id] + + # register content types + content::folder::register_content_type \ + -folder_id $folder_id \ + -content_type "content_revision" \ + -include_subtypes "t" + + # TODO: setup default page to fill in index + set index_page_id [content::item::new \ + -name "index" \ + -parent_id $folder_id \ + -title "New Wiki" \ + -storage_type "text"] + db_dml set_live "update cr_items set live_revision=latest_revision where item_id=:index_page_id" + +} + Index: openacs-4/packages/wiki/tcl/wiki-procs.tcl =================================================================== RCS file: /usr/local/cvsroot/openacs-4/packages/wiki/tcl/wiki-procs.tcl,v diff -u -N --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ openacs-4/packages/wiki/tcl/wiki-procs.tcl 4 Jan 2005 18:01:18 -0000 1.1 @@ -0,0 +1,145 @@ +# + +ad_library { + + procs for wiki style cms + + @author Dave Bauer (dave@thedesignexperience.org) + @creation-date 2004-09-03 + @arch-tag: 407a000b-0f99-4129-ae94-40679a0d4df2 + @cvs-id $Id: wiki-procs.tcl,v 1.1 2005/01/04 18:01:18 daveb Exp $ +} + +namespace eval wiki:: {} + +ad_proc -public wiki::info { + ref +} { + Tries to resolve a wiki reference to + a URL within OpenACS + + @author Dave Bauer dave@thedesignexperience.org + @creation-date 2004-09-03 + + @param ref Wiki reference to a file/url + + @return id, name, date +} { + + # resolve reference to a package_id/package_key + 
+ # see if package_key::wiki_info exists, if so call it + # first element is the relative URL of the link + # second element I have no idea + # 3rd element is the last modified date. leave empty if the + # ref doesn't exist yet + # this sucks we have to hammer the databse for every link + set package_id [ad_conn package_id] + set d [db_string get_lm "select o.last_modified from acs_objects o, cr_items ci, cr_folders cf where cf.package_id=:package_id and ci.parent_id=cf.folder_id and ci.name=:ref and o.object_id=ci.item_id" -default ""] + set ret [list "${ref}" "${ref}" "$d"] + ns_log notice " +DB -------------------------------------------------------------------------------- +DB DAVE debugging procedure wiki::info +DB -------------------------------------------------------------------------------- +DB ref = '${ref}' +DB ret = '${ret}' +DB --------------------------------------------------------------------------------" + return $ret + +} + +ad_proc -public wiki::get_folder_id { + {-package_id ""} +} { + Return content repository folder_id for the + specified wiki package_id. + + @author Dave Bauer (dave@thedesignexperience.org) + @creation-date 2004-09-06 + + + @param package_id If not speicifed use the current package_id from + ad_conn. It there is no current connection or folder does not + exist, returns empty string. + + @return + + @error +} { + # should really map site_nodes to cr_folders, but I + # want to see what can be done with stock OpenACS + if {$package_id eq ""} { + if {[ad_conn -connected_p]} { + set package_id [ad_conn package_id] + } else { + return "" + } + } + return [db_string get_folder_id \ + "select folder_id from cr_folders where package_id=:package_id" \ + -default ""] +} + + +# procs for generic wiki::info procedure + +# procs to index and for search syndication/rss + +# procs for recent changes (use search syndication??) + +# attachments/images/uploads? + + +# TODO figure out where this belongs! +# it needs to integrate with the richtext widget someday + +ad_proc -public ad_wiki_text_to_html { + text + {info_proc "ad_wiki_info"} +} { + Converts Wiki formatted text to html + @author Dave Bauer (dave@thedesignexperience.org) + @creation-date 2004-09-03 +} { + set stream [Wikit::Format::TextToStream $text] +# wiki::info will find the parent site node of a reference, and + # look for a proc called package-key::wiki_info which should + # return the id, name, modified date of the item + # (i think id means "url" but I might be wrong!) + set html [Wikit::Format::StreamToHTML $stream " " $info_proc] + return [lindex $html 0] +} + +ad_proc -public ad_wiki_info { + ref +} { + Tries to resolve a wiki reference to + a URL within OpenACS + + @author Dave Bauer dave@thedesignexperience.org + @creation-date 2004-09-03 + + @param ref Wiki reference to a file/url + + @return id, name, date +} { + + # resolve reference to a package_id/package_key + + # see if package_key::wiki_info exists, if so call it + # first element is the relative URL of the link + # second element I have no idea + # 3rd element is the last modified date. 
leave empty if the + # ref doesn't exist yet + + set ret [list "${ref}" "${ref}" "1"] + ns_log notice " +DB -------------------------------------------------------------------------------- +DB DAVE debugging procedure wiki::info +DB -------------------------------------------------------------------------------- +DB ref = '${ref}' +DB ret = '${ret}' +DB --------------------------------------------------------------------------------" + return $ret + +} Index: openacs-4/packages/wiki/tcl/wikit-procs.tcl =================================================================== RCS file: /usr/local/cvsroot/openacs-4/packages/wiki/tcl/wikit-procs.tcl,v diff -u -N --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ openacs-4/packages/wiki/tcl/wikit-procs.tcl 4 Jan 2005 18:01:18 -0000 1.1 @@ -0,0 +1,793 @@ +# -*- tcl -*- +# Formatter for wiki markup text, CGI as well as GUI + +#package provide Wikit::Format 1.0 + +namespace eval Wikit::Format { + namespace export TextToStream StreamToTk StreamToHTML StreamToRefs \ + StreamToUrls + + # In this file: + # + # proc TextToStream {text} -> stream + # proc StreamToTk {stream infoProc} -> {{tagged-text} {urls}} + # proc StreamToHTML {stream cgiPrefix infoProc} -> {{html} {urls}} + # proc StreamToRefs {stream infoProc} -> {pageNum ...} + # proc StreamToUrls {stream} -> {url type ...} + # + # The "Text" format is a Wiki-like one you can edit with a text editor. + # The "Tk" format can insert styled text information in a text widget. + # The "HTML" format is the format generated for display by a browser. + # The "Refs" format is a list with details about embedded references. + # The "Urls" format is a list of external references, bracketed or not. + # The "Stream" format is a Tcl list, it's only used as interim format. + + # ========================================================================= + + ### More format documentation + + # ========================================================================= + + # + # Ad "Tk") This is a list of pairs {text taglist text taglist ...} + # which can be directly inserted into any text widget. + # + # Ad "Stream") This is the first time that the stream format is documented. + # + # The base format is that of a list of pairs {cmd arg cmd arg ...} + # The available commands fall into three categories [x]: + # + # 1. Data carriers + # 2. Visual markers + # 3. Structural markers + # + # [x] In the previous incarnation of this stream format the categories + # were essentially all mixed up and jumbled together. For example + # the command 'T' crossed category 1 and 3, introducing a new para- + # graph and also carrying the first fragment of text of said para- + # graph. That made for difficult creation and difficult interpreta- + # tion. It is the separation of the categories which makes the re- + # organized format much easier to generate and convert (<=> simpler + # code, which is even faster). (Not to mention the eviction of + # treating [, ], {, }, and \ as special characters. They are not). + # + # Ad 1) The empty string and 'g', 'u' and 'x'. The first is for text, + # the others specify the various possible links. + # + # Cmd Argument + # ------------------------------------------------------ + # {} The text to display + # g Name/Title of referenced wiki page + # u external URL, was unbracket'ed in sources + # x external URL, bracket'ed in sources + # ------------------------------------------------------ + # + # Ad 2) Currently only two: 'b' and 'i' for bold and italic emphasis. 
+ # The argument specifies if the emphasis is switched on or off. + # The permitted values are 0 (off) and 1 (on). + # + # Ad 3) These are the markers for the various distinctive sections + # in wiki markup. + # + # Cmd 'Begin' Argument + # ------------------------------------------------------ + # T Paragraph Nesting level + # Q Quoted line Nesting level + # U List item (unordered) Nesting level + # O List item (enumerated) Nesting level + # I List item (term) Nesting level + # D List item (term def) Nesting level + # H Horizontal rule Line-width + # ------------------------------------------------------ + # + # Note: The current frontend renderer provides only nesting + # level 0 and a line-width 1. The current backend + # renderers ignore this information. + # + + # ========================================================================= + # ========================================================================= + + ### Frontend renderer :: Wiki Markup ==> Stream ### + + # ========================================================================= + # ========================================================================= + + ## Basic operation: Each line is classified via regexes and then handled + ## according to its type. Text lines are coalesced into paragraphs, with + ## some special code to deal with the boundary between normal text and + ## verbatim quoted text. Each collected line is then separated into chunks + ## of text, highlighting command and links (wiki page / external). This is + ## then added to the internal representation. + + proc TextToStream {text} { + # Based upon ideas from the kiwi renderer. One step rendering into + # the internal representation without a script as intermediate step. + + set irep [list] ; # Internal representation generated here. + set paragraph "" ; # Buffer for the text of a single paragraph + set empty_std 0 ; # Boolean flag. Set if the preceding line was empty. + + foreach line [split $text \n] { + # Per line, classify the it and extract the main textual information. + foreach {tag depth txt aux} [linetype $line] break ; # lassign + + # Classification tags + # + # UL, OL, DL = Lists (unordered/bullet, ordered/enum, + # definition/itemized) + # PRE = Verbatim / Quoted lines + # HR = Horizontal rule + # STD = Standard text + + ## Whenever we encounter a special line, not quoted, any + ## preceding empty line has no further effect. + # + switch -exact -- $tag { + HR - UL - OL - DL {set empty_std 0} + default {} + } + + ## Whenever we encounter a special line, including quoted, we + ## have to render the data of the preceding paragraph, if + ## there is any. + # + switch -exact -- $tag { + HR - UL - OL - DL - PRE { + if {$paragraph != {}} { + lappend irep T 0 ; render $paragraph + set paragraph "" + } + } + default {} + } + + ## Now processs the lines according to their types. + # + # Tag | depth | txt | pfx | aux + # ----+---------------+-----------------+---------------+--------------- + # UL | nesting level | text of item | before bullet | bullet + # OL | nesting level | text of item | before bullet | bullet + # DL | nesting level | term definition | before bullet | term + # PRE | 1 | text to display | + # HR | 0 | text of ruler | + # STD | 0 | text to display | + # ----+---------------+-----------------+---------------+--------------- + + # HR - Trivial + # UL, OL - Mark their beginning and then render their text + # - like a normal paragraph. 
+ # DL - Like list item, except that there are two different + # parts of text we have to render, term and term definition. + # PRE - Quoted text is searched for links, but nothing + # more. An empty preceding line is added to the + # quoted section to keep it at a distance from the + # normal text coming before. + # STD - Lines are added to the paragraph until an empty one is + # encountered. This closes the paragraph. + + switch -exact -- $tag { + HR {lappend irep H 1} + UL {lappend irep U 0 ; render $txt} + OL {lappend irep O 0 ; render $txt} + DL { + lappend irep I 0 ; render $aux + lappend irep D 0 ; render $txt + } + PRE { + # Transform a preceding 'STD {}' into an empty Q line, + # i.e make it part of the verbatim section, enforce + # visual distance. + + if {$empty_std} {lappend irep Q 0 {} {}; set empty_std 0} + lappend irep Q 0 + if {$txt != {}} {rlinks $txt} + } + STD { + if {$txt == {}} { + if {$paragraph != {}} { + lappend irep T 0 ; render $paragraph + set paragraph "" + } + set empty_std 1 + } else { + if {$paragraph != {}} {append paragraph " "} + append paragraph $txt + set empty_std 0 + } + } + default { + error "Unknown linetype $tag" + } + } + } + + # Render the last paragraph, if any. + + if {$paragraph != {}} { + lappend irep T 0 ; render $paragraph + } + + return $irep + } + + proc linetype {line} { + # Categorize a line of wiki text based on indentation and prefix + + set line [string trimright $line] + + ## Compat: retain tabs ... + ## regsub -all "\t" $line " " line + # + ## More compat'ibility ... + ## The list tags allow non-multiples of 3 if the prefix contains at + ## least 3 spaces. The standard wiki accepts anything beyond 3 spaces. + ## Keep the kiwi regexes around for future enhancements. + + foreach {tag re} { + UL {^( + {0,2})(\*) (\S.*)$} + OL {^( + {0,2})(\d)\. (\S.*)$} + DL {^( + {0,2})([^:]+): (\S.*)$} + + UL {^( +)(\*) (\S.*)$} + OL {^( +)(\d)\. (\S.*)$} + DL {^( +)([^:]+): (\S.*)$} + } { + # Compat: Remove restriction to multiples of 3 spaces. + + if {[regexp $re $line - pfx aux txt] } { + # && string length $pfx % 3 == 0 + return [list $tag [expr {[string length $pfx]/3}] $txt $aux] + } + } + + # Compat: Accept a leading TAB is marker for quoted text too. + + if {([string index $line 0] == " ") || ([string index $line 0] == "\t")} { + return [list PRE 1 $line] + } + if {[regexp {^-{4,}$} $line]} { + return [list HR 0 $line] + } + return [list STD 0 $line] + } + + proc rlinks {text} { + # Convert everything which looks like a link into a link. This + # command is called for quoted lines, and only quoted lines. + + upvar irep irep + + # Compat: (Bugfix) Added " to the regexp as proper boundary of an url. + set re {\m(https?|ftp|news|mailto|file):(\S+[^\]\)\s\.,!\?;:'>"])} + set txt 0 + set end [string length $text] + + ## Find the places where an url is inside of the quoted text. + + foreach {match dummy dummy} [regexp -all -indices -inline $re $text] { + # Skip the inner matches of the RE. + foreach {a e} $match break + if {$a > $txt} { + # Render text which was before the url + lappend irep {} [string range $text $txt [expr {$a - 1}]] + } + # Render the url + lappend irep u [string range $text $a $e] + set txt [incr e] + } + if {$txt < $end} { + # Render text after the last url + lappend irep {} [string range $text $txt end] + } + return + } + + proc render {text} { + # Rendering of regular text: links, markup, brackets. 
+ + # The main idea/concept behind the code below is to find the + # special features in the text and to isolate them from the normal + # text through special markers (\0\1...\0). As none of the regular + # expressions will match across these markers later passes + # preserve the results of the preceding passes. At the end the + # string is split at the markers and then forms the list to add to + # the internal representation. This way of doing things keeps the + # difficult stuff at the C-level and avoids to have to repeatedly + # match and process parts of the string. + + upvar irep irep + variable codemap + + ## puts stderr \]>>$irep<<\[ + ## puts stderr >>>$text<<< + + # Detect page references, external links, bracketed external + # links, brackets and markup (hilites). + + # Complex RE's used to process the string + set pre {\[([^\]]*)]} ; # page references ; # compat + set lre {\m(https?|ftp|news|mailto|file):(\S+[^\]\)\s\.,!\?;:'>"])} ; # links + set blre "\\\[\0\1u\2(\[^\0\]*)\0\\\]" + + # " - correct emacs hilite + + # Order of operation: + # - Remap double brackets to avoid their interference. + # - Detect embedded links to external locations. + # - Detect brackets links to external locations (This uses the + # fact that such links are already specially marked to make it + # easier. + # - Detect references to other wiki pages. + # - Render bold and italic markup. + # + # Wiki pages are done last because there is a little conflict in + # the RE's for links and pages: Both allow usage of the colon (:). + # Doing pages first would render links to external locations + # incorrectly. + # + # Note: The kiwi renderer had the order reversed, but also + # disallowed colon in page titles. Which is in conflict with + # existing wiki pages which already use that character in titles + # (f.e. [COMPANY: Oracle]. + + # Make sure that double brackets do not interfere with the + # detection of links. + regsub -all {\[\[} $text {\&!} text + + ## puts stderr A>>$text<<* + + # Isolate external links. + regsub -all $lre $text "\0\1u\2\\1:\\2\0" text + ## puts stderr C>>$text<<* + + # External links in brackets are simpler cause we know where the + # links are already. + regsub -all $blre $text "\0\1x\2\\1\0" text + ## puts stderr D>>$text<<* + + # Now handle wiki page references + regsub -all $pre $text "\0\1g\2\\1\0" text + ## puts stderr B>>$text<<* + + # Hilites are transformed into on and off directives. + # This is a bit more complicated ... Hilites can be written + # together and possible nested once, so it has make sure that + # it recognizes everything in the correct order! + + # Examples ... + # {''italic'''''bold'''} {} {italicbold} + # {'''bold'''''italic''} {} {bolditalic} + # {'''''italic_bold'''''} {} {italic_bold} + + # First get all un-nested hilites + while { + [regsub -all {'''([^']+?)'''} $text "\0\1b+\0\\1\0\1b-\0" text] || + [regsub -all {''([^']+?)''} $text "\0\1i+\0\\1\0\1i-\0" text] + } {} + + # And then the remaining ones. This also captures the hilites + # where the highlighted text contains single apostrophes. + + regsub -all {'''(.+?)'''} $text "\0\1b+\0\\1\0\1b-\0" text + regsub -all {''(.+?)''} $text "\0\1i+\0\\1\0\1i-\0" text + + + # Normalize brackets ... + set text [string map {&! [ ]] ]} $text] + + # Listify and generate the final representation of the paragraph. 
+ + ## puts stderr *>>$text<<* + + foreach item [split $text \0] { + ## puts stderr ====>>$item<<< + + set cmd {} ; set detail {} + foreach {cmd detail} [split $item \2] break + set cmd [string trimleft $cmd \1] + + ## puts stderr ====>>$cmd|$detail<<< + + switch -exact -- $cmd { + b+ {lappend irep b 1} + b- {lappend irep b 0} + i+ {lappend irep i 1} + i- {lappend irep i 0} + default { + if {$detail == {}} { + # Pure text + if {$cmd != ""} { + lappend irep {} $cmd + } + } else { + # References. + lappend irep $cmd $detail + } + } + } + + ## puts stderr ======\]>>$irep<<\[ + } + ## puts stderr ======\]>>$irep<<\[ + return + } + + # ========================================================================= + # ========================================================================= + + ### Backend renderer :: Stream ==> Tk ### + + # ========================================================================= + # ========================================================================= + + # Output specific conversion. Takes a token stream and converts this into + # a three-element list. The first element is a list of text fragments and + # tag-lists, as described at the beginning as the "Tk" format. The second + # element is a list of triples listing the references found in the page. + # This second list is required because some information about references + # is missing from the "Tk" format. And adding them into that format would + # make the insertion of data into the final text widget ... complex (which + # is an understatement IMHO). Each triple consists of: url-type (g, u, x), + # page-local numeric id of url (required for and used in tags) and + # reference text, in this order. The third list is a list of embedded + # images (i.e. stored in "images" view), to be displayed in text widget. + + # Note: The first incarnation of the rewrite to adapt to the new + # "Stream" format had considerable complexity in the part + # assembling the output. It kept knowledge about the last used + # tags and text around, using this to merge runs of text having + # the same taglist, thus keeping the list turned over to the text + # widget shorter. Thinking about this I came to the conclusion + # that removal of this complexity and replacing it with simply + # unconditional lappend's would gain me time in StreamToTk, but + # was also unsure how much of a negative effect the generated + # longer list would have on the remainder of the conversion (setup + # of link tag behaviour in the text widget, insertion in to the + # text widget). Especially if the drain would outweigh the gain. + # As can be seen from the code chosen here, below, I found that + # the gain through the simplification was much more than the drain + # later. I gained 0.3 usecs in this stage and lost 0.13 in the + # next (nearly double time), overall gain 0.17. + + proc StreamToTk {s {ip ""}} { + variable tagmap ; # pre-assembled information, tags and spacing + variable vspace ; # .... 
+ # ; # State of renderer + set urls "" ; # List of links found + set eims "" ; # List of embedded images + set result "" ; # Tk result + set state T ; # Assume a virtual paragraph in front of the actual data + set count 0 ; # Id counter for page references + set xcount 0 ; # Id counter for bracketed external references + set number 0 ; # Counter for items in enumerated lists + set b 0 ; # State of bold emphasis - 0 = off, 1 = on + set i 0 ; # State of italic emphasis - 0 = off, 1 = on + + foreach {mode text} $s { + switch -exact -- $mode { + {} { + if {$text == {}} {continue} + lappend result $text $tagmap($state$b$i) + } + b - i {set $mode $text ; # text in {0,1}} + g { + set n [incr count] + lappend urls g $n $text + set tags [set base $tagmap($state$b$i)] + lappend tags url g$n + + if {$ip == ""} { + lappend result $text $tags + continue + } + + set info [lindex [$ip $text] 2] + + if {$info == "" || $info == 0} { + lappend result \[ $tags $text $base \] $tags + continue + } + + lappend result $text $tags + } + u { + set n [incr count] + lappend urls u $n $text + + set tags $tagmap($state$b$i) + if {[lindex $tags 0] == "fixed"} { + lappend tags urlq u$n + } else { + lappend tags url u$n + } + + lappend result $text $tags + } + x { + # support embedded images if present in "images" view + set iseq "" + if {[regexp {\.(gif|jpg|png)$} $text - ifmt]} { + set iseq [mk::select wdb.images url $text -count 1] + if {$iseq != "" && [info commands eim_$iseq] == ""} { + if {$ifmt == "jpg"} { set ifmt jpeg } + catch { package require tkimg::$ifmt } + catch { + image create photo eim_$iseq -format $ifmt \ + -data [mk::get wdb.images!$iseq image] + } + } + } + if {[info commands eim_$iseq] != ""} { + #puts "-> $xcount $text" + lappend result " " eim_$iseq + lappend eims eim_$iseq + } else { + set n [incr xcount] + lappend urls x $n $text + + set tags [set base $tagmap($state$b$i)] + lappend tags url x$n + lappend result \[ $base $n $tags \] $base + } + } + Q { + set number 0 ;# reset counter for items in enumerated lists + # use the body tag for the space before a quoted string + # so the don't get a gray background. + lappend result $vspace($state$mode) $tagmap(T00) + set state $mode + } + T - I - D { + set number 0 ;# reset counter for items in enumerated lists + lappend result $vspace($state$mode) $tagmap(${mode}00) + set state $mode + } + U { + lappend result \ + "$vspace($state$mode) *\t" $tagmap(${mode}00) + set state $mode + } + O { + lappend result \ + "$vspace($state$mode) [incr number].\t" $tagmap(${mode}00) + set state $mode + } + H { + lappend result \ + $vspace($state$mode) $tagmap(T00) \ + \t $tagmap(Hxx) \ + \n $tagmap(H00) + set state $mode + } + } + } + + list [lappend result "" body] $urls $eims + } + + # Map from the tagcodes used in StreamToTk above to the taglist + # used in the text widget the generated text will be inserted into. + + variable tagmap + array set tagmap { + T00 body T01 {body i} T10 {body b} T11 {body bi} + Q00 fixed Q01 {fixed i} Q10 {fixed b} Q11 {fixed bi} + H00 thin H01 {thin i} H10 {thin b} H11 {thin bi} + U00 ul U01 {ul i} U10 {ul b} U11 {ul bi} + O00 ol O01 {ol i} O10 {ol b} O11 {ol bi} + I00 dt I01 {dt i} I10 {dt b} I11 {dt bi} + D00 dl D01 {dl i} D10 {dl b} D11 {dl bi} + Hxx {hr thin} + } + + # Define amount of vertical space used between each logical section of text. + # | Current (. 
<=> 1) + # Last | T Q U O I D H + # ----------+---------------------- + # Text T | 2 2 2 2 2 1 2 + # Quote Q | 2 1 2 2 2 1 3 + # Bullet U | 2 2 1 1 1 1 2 + # Enum O | 2 2 1 1 1 1 2 + # Term I | 2 2 1 1 1 1 2 + # T/def D | 2 2 1 1 1 1 2 + # HRULE H | 1 1 1 1 1 1 2 + # ----------+---------------------- + + variable vspace + proc vs {last current dummy n} { + variable vspace + set vspace($last$current) [string repeat \n $n] + return + } + vs T T --- 2 ;vs T Q --- 2 ;vs T U --- 2 ;vs T O --- 2 ;vs T I --- 2 ;vs T D --- 1 ;vs T H --- 2 + vs Q T --- 2 ;vs Q Q --- 1 ;vs Q U --- 2 ;vs Q O --- 2 ;vs Q I --- 2 ;vs Q D --- 1 ;vs Q H --- 3 + vs U T --- 2 ;vs U Q --- 2 ;vs U U --- 1 ;vs U O --- 1 ;vs U I --- 1 ;vs U D --- 1 ;vs U H --- 2 + vs O T --- 2 ;vs O Q --- 2 ;vs O U --- 1 ;vs O O --- 1 ;vs O I --- 1 ;vs O D --- 1 ;vs O H --- 2 + vs I T --- 2 ;vs I Q --- 2 ;vs I U --- 1 ;vs I O --- 1 ;vs I I --- 1 ;vs I D --- 1 ;vs I H --- 2 + vs D T --- 2 ;vs D Q --- 2 ;vs D U --- 1 ;vs D O --- 1 ;vs D I --- 1 ;vs D D --- 1 ;vs D H --- 2 + vs H T --- 1 ;vs H Q --- 1 ;vs H U --- 1 ;vs H O --- 1 ;vs H I --- 1 ;vs H D --- 1 ;vs H H --- 2 + #rename vs {} + + # ========================================================================= + # ========================================================================= + + ### Backend renderer :: Stream ==> HTML ### + + # ========================================================================= + # ========================================================================= + + # Output specific conversion. Takes a token stream and converts this + # into HTML. The result is a 2-element list. The first element is the + # HTML to render. The second element is alist of triplets listing all + # references found in the stream (each triplet consists reference + # type, page-local numeric id and reference text). + + proc StreamToHTML {s {cgi ""} {ip ""}} { + set result "" + set state H ; # bogus hline as initial state. + set vstate "" ; # Initial state of visual FSM + set count 0 + variable html_frag + foreach {mode text} $s { + switch -exact -- $mode { + {} {append result [quote $text]} + b - i {append result $html_frag($mode$text)} + g { + if {$cgi == ""} { + append result "\[[quote $text]\]" + continue + } + if {$ip == ""} { + # no lookup, turn into a searchreference + append result \ + $html_frag(a_) $cgi$text $html_frag(tc) \ + [quote $text] $html_frag(_a) + continue + } + set info [$ip $text] + ns_log notice "info is $info" + foreach {id name date} $info break + + if {$id == ""} { + # not found, don't turn into an URL + append result "\[[quote $text]\]" + continue + } + + regsub {^/} $id {} id + if {$date > 0} { + # exists, use ID + append result \ + $html_frag(a_) $id $html_frag(tc) \ + [quote $text] $html_frag(_a) + continue + } + + # missing, use ID -- editor link on the brackets. + append result \ + $html_frag(a_) $id $html_frag(tc) \[ $html_frag(_a) \ + [quote $text] \ + $html_frag(a_) $id $html_frag(tc) \] $html_frag(_a) \ + } + u { + append result \ + $html_frag(a_) $text $html_frag(tc) \ + [quote $text] $html_frag(_a) + } + x { + if {[regexp {\.(gif|jpg|png)$} $text]} { + append result $html_frag(i_) $text $html_frag(tc) + } else { + append result \ + \[ $html_frag(a_) $text $html_frag(tc) \ + [incr count] $html_frag(_a) \] + } + } + T - Q - I - D - U - O - H { + append result $html_frag($state$mode) + set state $mode + } + } + } + # Close off the last section. + append result $html_frag(${state}_) + # Get rid of spurious newline at start of each quoted area. 
+	regsub -all "<pre>\n" $result "<pre>" result
+	list $result {}
+    }
+
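+    # Usage sketch (illustrative only): with a page body in a hypothetical
+    # variable wiki_text and the wiki::info lookup proc from wiki-procs.tcl,
+    # a caller could render a page roughly like this:
+    #
+    #   set stream [Wikit::Format::TextToStream $wiki_text]
+    #   foreach {html refs} [Wikit::Format::StreamToHTML $stream " " "wiki::info"] break
+    #   set page_ids [Wikit::Format::StreamToRefs $stream "wiki::info"]
+    #
+    # The second element returned by StreamToHTML is currently always {},
+    # which is why ad_wiki_text_to_html keeps only [lindex $html 0].
+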
+    proc quote {q} {
+	regsub -all {&} $q {\&amp;}  q
+	regsub -all {"} $q {\&quot;} q ; # "
+	regsub -all {<} $q {\&lt;}   q
+	regsub -all {>} $q {\&gt;}   q
+	regsub -all {&amp;(#\d+;)} $q {\&\1}   q
+	return $q
+    }
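+
+    # Illustrative examples for quote (a sketch, assuming the entity escaping
+    # above); the last regsub restores numeric character entities:
+    #   quote {a < b & "c"}   -->  a &lt; b &amp; &quot;c&quot;
+    #   quote {&#169; 2005}   -->  &#169; 2005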
+
+    # Define inter-section tagging, logical vertical space used between each logical section of text.
+    #		| Current              (. <=> 1)
+    #  Last	| T  Q  U  O  I  D  H
+    # ----------+----------------------
+    #  Text   T | See below
+    #  Quote  Q | 
+    #  Bullet U | 
+    #  Enum   O | 
+    #  Term   I | 
+    #  T/def  D | 
+    #  HRULE  H | 
+    # ----------+----------------------
+
+    variable  html_frag
+    proc vs {last current text} {
+	variable html_frag
+	set      html_frag($last$current) $text
+	return
+    }
+
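+    # The vs calls below pre-build html_frag(<last><current>): the markup
+    # StreamToHTML emits when the renderer switches from one logical section
+    # to the next. For instance, html_frag(TU) is written when a text
+    # paragraph is followed by an unordered list item, and html_frag(T_)
+    # closes the final paragraph of a page.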
+    vs T T   </p><p>        ;vs T Q   </p><pre>      ;vs T U   </p><ul><li>   ;vs T O   </p><ol><li>
+    vs Q T </pre><p>        ;vs Q Q   \n             ;vs Q U </pre><ul><li>   ;vs Q O </pre><ol><li>
+    vs U T  </ul><p>        ;vs U Q  </ul><pre>      ;vs U U \n<li>           ;vs U O  </ul><ol><li>
+    vs O T  </ol><p>        ;vs O Q  </ol><pre>      ;vs O U  </ol><ul><li>   ;vs O O \n<li>
+    vs I T  </dl><p>        ;vs I Q  </dl><pre>      ;vs I U  </dl><ul><li>   ;vs I O  </dl><ol><li>
+    vs D T  </dl><p>        ;vs D Q  </dl><pre>      ;vs D U  </dl><ul><li>   ;vs D O  </dl><ol><li>
+    vs H T       <p>        ;vs H Q       <pre>      ;vs H U      <ul><li>    ;vs H O      <ol><li>
+
+    vs T I  </p><dl><dt>    ;vs T D  </p><dl><dd>    ;vs T H "</p><hr size=1>"    ;vs T _  </p>
+    vs Q I </pre><dl><dt>   ;vs Q D </pre><dl><dd>   ;vs Q H "</pre><hr size=1>"  ;vs Q _ </pre>
+    vs U I  </ul><dl><dt>   ;vs U D  </ul><dl><dd>   ;vs U H "</ul><hr size=1>"   ;vs U _  </ul>
+    vs O I  </ol><dl><dt>   ;vs O D  </ol><dl><dd>   ;vs O H "</ol><hr size=1>"   ;vs O _  </ol>
+    vs I I        <dt>      ;vs I D       <dd>       ;vs I H "</dl><hr size=1>"   ;vs I _  </dl>
+    vs D I        <dt>      ;vs D D       <dd>       ;vs D H "</dl><hr size=1>"   ;vs D _  </dl>
+    vs H I      <dl><dt>    ;vs H D      <dl><dd>    ;vs H H "<hr size=1>
" ;vs H _ {} + #rename vs {} + + array set html_frag { + a_ { + tc {">} i1 + } ; # " + + # ========================================================================= + # ========================================================================= + + ### Backend renderer :: Stream ==> Refs ### + + # ========================================================================= + # ========================================================================= + + # Output specific conversion. Extracts all wiki internal page references + # from the token stream and returns them as a list of page id's. + + proc StreamToRefs {s ip} { + array set pages {} + + foreach {mode text} $s { + if {![string equal $mode g]} {continue} + + set info [$ip $text] + foreach {id name date} $info break + if {$id == ""} {continue} + + regexp {[0-9]+} $id id + set pages($id) "" + } + + array names pages + } + + # Output specific conversion. Extracts all external references + # from the token stream and returns them as a list of urls. + + proc StreamToUrls {s} { + array set urls {} + foreach {mode text} $s { + if {$mode == "u"} { set urls($text) imm } + if {$mode == "x"} { set urls($text) ref } + } + array get urls + } + +} ;# end of namespace Index: openacs-4/packages/wiki/www/index.vuh =================================================================== RCS file: /usr/local/cvsroot/openacs-4/packages/wiki/www/index.vuh,v diff -u -N --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ openacs-4/packages/wiki/www/index.vuh 4 Jan 2005 18:01:18 -0000 1.1 @@ -0,0 +1 @@ +rp_internal_redirect /packages/wiki/lib/page Index: openacs-4/packages/wiki/www/new.adp =================================================================== RCS file: /usr/local/cvsroot/openacs-4/packages/wiki/www/new.adp,v diff -u -N --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ openacs-4/packages/wiki/www/new.adp 4 Jan 2005 18:01:18 -0000 1.1 @@ -0,0 +1 @@ + \ No newline at end of file Index: openacs-4/packages/wiki/www/page.adp =================================================================== RCS file: /usr/local/cvsroot/openacs-4/packages/wiki/www/page.adp,v diff -u -N --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ openacs-4/packages/wiki/www/page.adp 4 Jan 2005 18:01:18 -0000 1.1 @@ -0,0 +1,7 @@ + + @title@ + @header_stuff@ + @context@ + @focus@ + +@content;noquote@ \ No newline at end of file