File: backend.php

Package: phpwiki 1.3.14-3
<?php // -*-php-*-
rcs_id('$Id: backend.php,v 1.33 2007/06/07 21:35:04 rurban Exp $');

/*
  Pagedata

   maintained by WikiPage 
    //:latestversion
    //:deleted (*)     (Set if latest content is empty.)
    //:pagename (*)

    hits
    is_locked

  Versiondata

    %content (?should this be here?)
    _supplanted : Time version ceased to be the current version

    mtime (*)   : Time of version edit.
    orig_mtime
    is_minor_edit (*)
    author      : nominal author
    author_id   : authenticated author
    summary

    //version
    //created (*)
    //%superceded
	
    //:serial

     (types are scalars: strings, ints, bools)
*/     

/**
 * A WikiDB_backend handles the storage and retrieval of data for a WikiDB.
 * It does not have to be this way, of course, but the standard WikiDB uses
 * a WikiDB_backend.  (Other WikiDBs could be written which use some other
 * method to access their underlying data store.)
 *
 * The interface outlined here seems to work well with both RDBMS-based
 * and flat DBM/hash based methods of data storage.
 *
 * Though it contains default implementations of certain methods,
 * this is an abstract base class.  It is expected that most efficient
 * backends will override nearly all the methods in this class.
 *
 * @access protected
 * @see WikiDB
 */
class WikiDB_backend
{
    /**
     * Get page meta-data from database.
     *
     * @param $pagename string Page name.
     * @return hash
     * Returns a hash containing the page meta-data.
     * Returns an empty array if there is no meta-data for the requested page.
     * Keys which might be present in the hash are:
     * <dl>
     *  <dt> locked  <dd> If the page is locked.
     *  <dt> hits    <dd> The page hit count.
     *  <dt> created <dd> Unix time of page creation. (FIXME: Deprecated: I
     *                    don't think we need this...) 
     * </dl>
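     *
     * A minimal usage sketch (hypothetical; $backend is a concrete
     * subclass instance):
     * <pre>
     *   $meta = $backend->get_pagedata('HomePage');
     *   $hits = isset($meta['hits']) ? $meta['hits'] : 0;
     * </pre>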
     */
    function get_pagedata($pagename) {
        trigger_error("virtual", E_USER_ERROR);
    }

    /**
     * Update the page meta-data.
     *
     * Only meta-data whose keys are present in $newdata is affected.
     *
     * For example:
     * <pre>
     *   $backend->update_pagedata($pagename, array('locked' => 1)); 
     * </pre>
     * will set the value of 'locked' to 1 for the specified page, but it
     * will not affect the value of 'hits' (or whatever other meta-data
     * may have been stored for the page.)
     *
     * To delete a particular piece of meta-data, set its value to false.
     * <pre>
     *   $backend->update_pagedata($pagename, array('locked' => false)); 
     * </pre>
     *
     * @param $pagename string Page name.
     * @param $newdata hash New meta-data.
     */
    function update_pagedata($pagename, $newdata) {
        trigger_error("virtual", E_USER_ERROR);
    }
    

    /**
     * Get the current version number for a page.
     *
     * @param $pagename string Page name.
     * @return int The latest version number for the page.  Returns zero if
     *  no versions of a page exist.
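     *
     * A hypothetical sketch of walking the version history backwards with
     * get_previous_version() (assuming it returns zero when no earlier
     * version exists):
     * <pre>
     *   $version = $backend->get_latest_version($pagename);
     *   while ($version > 0) {
     *       // ... inspect $version here ...
     *       $version = $backend->get_previous_version($pagename, $version);
     *   }
     * </pre>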
     */
    function get_latest_version($pagename) {
        trigger_error("virtual", E_USER_ERROR);
    }
    
    /**
     * Get preceding version number.
     *
     * @param $pagename string Page name.
     * @param $version int Find version before this one.
     * @return int The version number of the version in the database which
     *  immediately precedes $version.
     */
    function get_previous_version($pagename, $version) {
        trigger_error("virtual", E_USER_ERROR);
    }
    
    /**
     * Get revision meta-data and content.
     *
     * @param $pagename string Page name.
     * @param $version integer Which version to get.
     * @param $want_content boolean
     *  Indicates the caller really wants the page content.  If this
     *  flag is not set, the backend is free to skip fetching of the
     *  page content (as that may be expensive).  If the backend omits
     *  the content, the backend might still want to set the value of
     *  '%content' to the empty string if it knows there's no content.
     *
     * @return hash The version data, or false if specified version does not
     *    exist.
     *
     * Some keys which might be present in the $versiondata hash are:
     * <dl>
     * <dt> %content
     *  <dd> This is a pseudo-meta-data element (since it's actually
     *       the page data, get it?) containing the page content.
     *       If the content was not fetched, this key may not be present.
     * </dl>
     * For description of other version meta-data see WikiDB_PageRevision::get().
     * @see WikiDB_PageRevision::get
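     *
     * A hypothetical sketch of fetching the content of the newest revision:
     * <pre>
     *   $version = $backend->get_latest_version($pagename);
     *   $vdata   = $backend->get_versiondata($pagename, $version, true);
     *   if ($vdata and isset($vdata['%content']))
     *       $text = $vdata['%content'];
     * </pre>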
     */
    function get_versiondata($pagename, $version, $want_content = false) {
        trigger_error("virtual", E_USER_ERROR);
    }

    /**
     * Delete a page from the database, but keep it restorable
     * (i.e. save_page('') and DELETE the nonempty id).
     * This should remove all links (from the named page) from
     * the link database.
     * The deletion can be undone and shows up in RecentChanges.
     *
     * @param $pagename string Page name.
     */
    function delete_page($pagename) {
        $mtime = time();
        $user =& $GLOBALS['request']->_user;
        $vdata = array('author' => $user->getId(),
                       'author_id' => $user->getAuthenticatedId(),
                       'mtime' => $mtime);

        $this->lock(); // critical section:
        $version = $this->get_latest_version($pagename);
        $this->set_versiondata($pagename, $version+1, $vdata);
        $this->set_links($pagename, false); // links are purged.
        // SQL needs to invalidate the non_empty id
        if (! WIKIDB_NOCACHE_MARKUP) {
            // need the hits, perms and LOCKED, otherwise you can reset the perm 
            // by action=remove and re-create it with default perms
            $pagedata = $this->get_pagedata($pagename); 
            unset($pagedata['_cached_html']);
            $this->update_pagedata($pagename, $pagedata);
        }
        $this->unlock();
    }

    /**
     * Delete a page (and all its revisions) completely from the database.
     */
    function purge_page($pagename) {
        trigger_error("virtual", E_USER_ERROR);
    }

    /**
     * Delete an old revision of a page.
     *
     * Note that one is never allowed to delete the most recent version,
     * but that this requirement is enforced by WikiDB not by the backend.
     *
     * In fact, to be safe, backends should probably allow the deletion of
     * the most recent version.
     *
     * @param $pagename string Page name.
     * @param $version integer Version to delete.
     */
    function delete_versiondata($pagename, $version) {
        trigger_error("virtual", E_USER_ERROR);
    }

    /**
     * Create a new page revision.
     *
     * If the given ($pagename,$version) is already in the database,
     * this method completely overwrites any stored data for that version.
     *
     * @param $pagename string Page name.
     * @param $version int The version number of the new revision.
     * @param $data hash New revision metadata.
     *
     * @see get_versiondata
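     *
     * A hypothetical sketch of storing a brand new revision (meta-data keys
     * as listed in the Versiondata comment at the top of this file):
     * <pre>
     *   $version = $backend->get_latest_version($pagename) + 1;
     *   $backend->set_versiondata($pagename, $version,
     *                             array('%content' => $text,
     *                                   'author'   => $author,
     *                                   'mtime'    => time()));
     * </pre>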
     */
    function set_versiondata($pagename, $version, $data) {
        trigger_error("virtual", E_USER_ERROR);
    }

    /**
     * Update page version meta-data.
     *
     * If the given ($pagename,$version) is already in the database,
     * this method only changes those meta-data values whose keys are
     * explicitly listed in $newdata.
     *
     * @param $pagename string Page name.
     * @param $version int Version number of the revision to update.
     * @param $newdata hash New revision metadata.
     * @see set_versiondata, get_versiondata
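     *
     * For example, to change only the edit summary of an existing revision
     * (a sketch; all other meta-data is left untouched):
     * <pre>
     *   $backend->update_versiondata($pagename, $version,
     *                                array('summary' => 'fixed typo'));
     * </pre>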
     */
    function update_versiondata($pagename, $version, $newdata) {
        $data = $this->get_versiondata($pagename, $version, true);
        if (!$data) {
            assert($data);
            return;
        }
        foreach ($newdata as $key => $val) {
            if (empty($val))
                unset($data[$key]);
            else
                $data[$key] = $val;
        }
        $this->set_versiondata($pagename, $version, $data);
    }
    
    /**
     * Set links for page.
     *
     * @param $pagename string Page name.
     *
     * @param $links array List of page names to which the page links.
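     *
     * A hypothetical sketch, passing a plain list of target page names as
     * documented above:
     * <pre>
     *   $backend->set_links('HomePage', array('RecentChanges', 'SandBox'));
     * </pre>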
     */
    function set_links($pagename, $links) {
        trigger_error("virtual", E_USER_ERROR);
    }
        
    /**
     * Find pages which link to or are linked from a page.
     *
     * @param $pagename string Page name.
     * @param $reversed boolean True to get backlinks.
     *
     * FIXME: array or iterator?
     * @return object A WikiDB_backend_iterator.
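     *
     * A hypothetical sketch of listing backlinks:
     * <pre>
     *   $iter = $backend->get_links($pagename, true); // reversed => backlinks
     *   while ($link = $iter->next())
     *       echo $link['pagename'], "\n";
     *   $iter->free();
     * </pre>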
     */
    function get_links($pagename, $reversed, $include_empty=false,
                       $sortby='', $limit='', $exclude='') {
        //FIXME: implement simple (but slow) link finder.
        die("FIXME get_links");
    }

    /**
     * Get all revisions of a page.
     *
     * @param $pagename string The page name.
     * @return object A WikiDB_backend_iterator.
     */
    function get_all_revisions($pagename) {
        include_once('lib/WikiDB/backend/dumb/AllRevisionsIter.php');
        return new WikiDB_backend_dumb_AllRevisionsIter($this, $pagename);
    }
    
    /**
     * Get all pages in the database.
     *
     * Pages should be returned in alphabetical order if that is
     * feasible.
     *
     * @access protected
     *
     * @param $include_defaulted boolean
     * If set, even pages with no content will be returned
     * --- but still only if they have at least one revision (not
     * counting the default revision 0) entered in the database.
     *
     * Normally pages whose current revision has empty content
     * are not returned as these pages are considered to be
     * non-existing.
     *
     * @return object A WikiDB_backend_iterator.
     */
    function get_all_pages($include_defaulted, $orderby=false, $limit='', $exclude='') {
        trigger_error("virtual", E_USER_ERROR);
    }
        
    /**
     * Title or full text search.
     *
     * Pages should be returned in alphabetical order if that is
     * feasible.
     *
     * @access protected
     *
     * @param $search object A TextSearchQuery object describing the parsed query string, 
     *                       with efficient methods for SQL and PCRE match.
     *
     * @param $fulltext boolean If true, a full text search is performed,
     *  otherwise a title search is performed.
     *
     * @return object A WikiDB_backend_iterator.
     *
     * @see WikiDB::titleSearch
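     *
     * A hypothetical sketch (TextSearchQuery lives in lib/TextSearchQuery.php;
     * constructor details may differ):
     * <pre>
     *   $query = new TextSearchQuery('wiki -php');
     *   $iter  = $backend->text_search($query, true); // full text search
     *   while ($page = $iter->next())
     *       echo $page['pagename'], "\n";
     * </pre>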
     */
    function text_search($search, $fulltext=false, $sortby='', 
			 $limit='', $exclude='') 
    {
        // This method implements a simple linear search
        // through all the pages in the database.
        //
        // It is expected that most backends will overload
        // this method with something more efficient.
        include_once('lib/WikiDB/backend/dumb/TextSearchIter.php');
        // ignore $limit
        $pages = $this->get_all_pages(false, $sortby, false, $exclude);
        return new WikiDB_backend_dumb_TextSearchIter($this, $pages, $search, $fulltext, 
                                                      array('limit' => $limit, 
                                                            'exclude' => $exclude));
    }


    /**
     * Find pages and link values matching a link search query.
     *
     * @access protected
     * @param $pages     object A TextSearchQuery object.
     * @param $linkvalue object A TextSearchQuery object for the linkvalues 
     *                          (linkto, relation or backlinks or attribute values).
     * @param $linktype  string One of the 4 linktypes.
     * @param $relation  object A TextSearchQuery object or false.
     * @param $options   array Currently ignored. hash of sortby, limit, exclude.
     * @return object A WikiDB_backend_iterator.
     * @see WikiDB::linkSearch
     */
    function link_search( $pages, $linkvalue, $linktype, $relation=false, $options=array() ) {
        include_once('lib/WikiDB/backend/dumb/LinkSearchIter.php');
        $pageiter = $this->text_search($pages);
        return new WikiDB_backend_dumb_LinkSearchIter($this, $pageiter, $linkvalue, $linktype, $relation, $options);
    }

    /**
     * Find pages with highest hit counts.
     *
     * Find the pages with the highest hit counts.  The pages should
     * be returned in reverse order by hit count.
     *
     * @access protected
     * @param integer $limit Return no more than this many pages.
     * @return object A WikiDB_backend_iterator.
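     *
     * A hypothetical sketch of listing the ten most visited pages:
     * <pre>
     *   $iter = $backend->most_popular(10);
     *   while ($page = $iter->next())
     *       echo $page['pagename'], "\n";
     *   $iter->free();
     * </pre>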
     */
    function most_popular($limit, $sortby='-hits') {
        // This method fetches all pages, then
        // sorts them by hit count.
        // (Not very efficient.)
        //
        // It is expected that most backends will overload
        // this method with something more efficient.
        include_once('lib/WikiDB/backend/dumb/MostPopularIter.php');
        $pages = $this->get_all_pages(false, $sortby, false);
        return new WikiDB_backend_dumb_MostPopularIter($this, $pages, $limit);
    }

    /**
     * Find recent changes.
     *
     * @access protected
     * @param $params hash See WikiDB::mostRecent for a description
     *  of parameters which can be included in this hash.
     * @return object A WikiDB_backend_iterator.
     * @see WikiDB::mostRecent
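     *
     * A hypothetical sketch (parameter names follow WikiDB::mostRecent):
     * <pre>
     *   $iter = $backend->most_recent(array('limit' => 20));
     *   while ($rev = $iter->next())
     *       echo $rev['pagename'], ' v', $rev['version'], "\n";
     * </pre>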
     */
    function most_recent($params) {
        // This method is very inefficient and searches through
        // all pages for the most recent changes.
        //
        // It is expected that most backends will overload
        // this method with something more efficient.
        include_once('lib/WikiDB/backend/dumb/MostRecentIter.php');
        $pages = $this->get_all_pages(true, '-mtime');
        return new WikiDB_backend_dumb_MostRecentIter($this, $pages, $params);
    }

    function wanted_pages($exclude_from='', $exclude='', $sortby='', $limit='') {
        include_once('lib/WikiDB/backend/dumb/WantedPagesIter.php');
        $allpages = $this->get_all_pages(true,false,false,$exclude_from);
        return new WikiDB_backend_dumb_WantedPagesIter($this, $allpages, $exclude, $sortby, $limit);
    }

    /**
     * Lock backend database.
     *
     * Calls may be nested.
     *
     * @param $write_lock boolean If true (the default), a write lock is
     *     acquired, otherwise a read lock.  If the backend doesn't support
     *     read locking, then it should make a write lock no matter which type
     *     of lock was requested.
     *
     *     All backends <em>should</em> support write locking.
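     *
     * A sketch of the intended critical-section pattern (see delete_page()
     * above for a real use):
     * <pre>
     *   $backend->lock();
     *   // ... read-modify-write several records atomically ...
     *   $backend->unlock();
     * </pre>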
     */
    function lock($write_lock = true) {
    }

    /**
     * Unlock backend database.
     *
     * @param $force boolean Normally, the database is not unlocked until
     *  unlock() is called as many times as lock() has been.  If $force is
     *  set to true, the database is unconditionally unlocked.
     */
    function unlock($force = false) {
    }


    /**
     * Close database.
     */
    function close () {
    }

    /**
     * Synchronize with filesystem.
     *
     * This should flush all unwritten data to the filesystem.
     */
    function sync() {
    }

    /**
     * Optimize the database.
     */
    function optimize() {
    }

    /**
     * Check database integrity.
     *
     * This should check the validity of the internal structure of the database.
     * Errors should be reported via:
     * <pre>
     *   trigger_error("Message goes here.", E_USER_WARNING);
     * </pre>
     *
     * @return boolean True iff database is in a consistent state.
     */
    function check() {
    }

    /**
     * Put the database into a consistent state 
     * by reparsing and restoring all pages.
     *
     * This should put the database into a consistent state.
     * (I.e. rebuild indexes, etc...)
     *
     * @return boolean True iff successful.
     */
    function rebuild() {
	global $request;
	$dbh = $request->getDbh();
    	$iter = $dbh->getAllPages(false);
        while ($page = $iter->next()) {
	    $current = $page->getCurrentRevision(true);
	    $pagename = $page->getName();
	    $meta = $current->_data;
	    $version = $current->getVersion();
	    $content =& $meta['%content'];
	    $formatted = new TransformedText($page, $content, $current->getMetaData());
	    $type = $formatted->getType();
	    $meta['pagetype'] = $type->getName();
	    $links = $formatted->getWikiPageLinks(); // linkto => relation
	    $this->lock(array('version','page','recent','link','nonempty'));
	    $this->set_versiondata($pagename, $version, $meta);
	    $this->set_links($pagename, $links);
	    $this->unlock(array('version','page','recent','link','nonempty'));
        }
    }

    function _parse_searchwords($search) {
        $search = strtolower(trim($search));
        if (!$search)
            return array(array(),array());
        
        $words = preg_split('/\s+/', $search);
        $exclude = array();
        foreach ($words as $key => $word) {
            if ($word[0] == '-' && $word != '-') {
                $word = substr($word, 1);
                $exclude[] = preg_quote($word);
                unset($words[$key]);
            }
        }
        return array($words, $exclude);
    }

    /** 
     * Split the given limit parameter into offset,limit. (offset is optional. default: 0)
     * Duplicate the PageList function here to avoid loading the whole PageList.php 
     * Usage: 
     *   list($offset,$count) = $this->limit($args['limit']);
     */
    function limit($limit) {
        if (strstr($limit, ','))
            return split(',', $limit);
        else
            return array(0, $limit);
    }
    
    /** 
     * Handle sortby requests for the DB iterator and table header links.
     * Prefix the column with + or - like "+pagename","-mtime", ...
     * supported actions: 'flip_order' "mtime" => "+mtime" => "-mtime" ...
     *                    'db'         "-pagename" => "pagename DESC"
     * In PageList all columns are sortable (patch by DanFr).
     * Here with the backend only some are; the rest is delegated to PageList
     * (some kind of DumbIter).
     * Duplicate the PageList function here to avoid loading the whole
     * PageList.php, and it forces the backend-specific sortable_columns().
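     *
     * A hypothetical sketch of the two documented actions:
     * <pre>
     *   $next = $backend->sortby('pagename', 'flip_order');  // "-pagename"
     *   $sql  = $backend->sortby('-pagename', 'db');         // "pagename DESC"
     * </pre>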
     */
    function sortby ($column, $action, $sortable_columns=false) {
        if (empty($column)) return '';
        //support multiple comma-delimited sortby args: "+hits,+pagename"
        if (strstr($column, ',')) {
            $result = array();
            foreach (explode(',', $column) as $col) {
                if (empty($this))
                    $result[] = WikiDB_backend::sortby($col, $action);
                else
                    $result[] = $this->sortby($col, $action);
            }
            return join(",",$result);
        }
        if (substr($column,0,1) == '+') {
            $order = '+'; $column = substr($column,1);
        } elseif (substr($column,0,1) == '-') {
            $order = '-'; $column = substr($column,1);
        }
        // default order: +pagename, -mtime, -hits
        if (empty($order))
            if (in_array($column,array('mtime','hits')))
                $order = '-';
            else
                $order = '+';
        if ($action == 'flip_order') {
            return ($order == '+' ? '-' : '+') . $column;
        } elseif ($action == 'init') {
            $this->_sortby[$column] = $order;
            return $order . $column;
        } elseif ($action == 'check') {
            return (!empty($this->_sortby[$column]) or 
                    ($GLOBALS['request']->getArg('sortby') and 
                     strstr($GLOBALS['request']->getArg('sortby'),$column)));
        } elseif ($action == 'db') {
            // native sort possible?
            if (!empty($this) and !$sortable_columns)
                $sortable_columns = $this->sortable_columns();
            if (in_array($column, $sortable_columns))
                // asc or desc: +pagename, -pagename
                return $column . ($order == '+' ? ' ASC' : ' DESC');
            else 
                return '';
        }
        return '';
    }

    function sortable_columns() {
        return array('pagename'/*,'mtime','author_id','author'*/);
    }

    // adds surrounding quotes 
    function quote ($s) { return "'".$s."'"; }
    // no surrounding quotes because we know it's a string
    function qstr ($s)  { return $s; }

    function isSQL () {
        return in_array(DATABASE_TYPE, array('SQL','ADODB','PDO'));
    }

    function write_accesslog(&$entry) {
        global $request;
        $dbh = &$this->_dbh;
        $log_tbl = $entry->_accesslog->logtable;
        // duration problem: sprintf "%f" might use comma e.g. "100,201" in european locales
        $dbh->query("INSERT INTO $log_tbl"
                    . " (time_stamp,remote_host,remote_user,request_method,request_line,request_uri,"
                    .   "request_args,request_time,status,bytes_sent,referer,agent,request_duration)"
                    . " VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)",
                    array(
                          // Problem: date formats are backend specific. Either use unixtime as %d (long),
                          // or the native timestamp format.
                          $entry->time,
                          $entry->host, 
                          $entry->user,
                          $entry->request_method, 
                          $entry->request, 
                          $entry->request_uri,    
                          $entry->request_args,
                          $entry->_ncsa_time($entry->time), 
                          $entry->status, 
                          (int)$entry->size,
                          $entry->referer,
                          $entry->user_agent,
                          $entry->duration));
    }
};

/**
 * Iterator returned by backend methods which (possibly) return
 * multiple records.
 *
 * FIXME: This might be two separate classes: page_iter and version_iter.
 * For the versions we have WikiDB_backend_dumb_AllRevisionsIter.
 */
class WikiDB_backend_iterator
{
    /**
     * Get the next record in the iterator set.
     *
     * This returns a hash. The hash may contain the following keys:
     * <dl>
     * <dt> pagename <dd> (string) the page name, or the linked page name on link iterators
     * <dt> version  <dd> (int) the version number
     * <dt> pagedata <dd> (hash) page meta-data (as returned from backend::get_pagedata().)
     * <dt> versiondata <dd> (hash) revision meta-data (as returned from backend::get_versiondata().)
     * <dt> linkrelation <dd> (string) the page naming the relation (e.g. isa:=page <=> isa)
     * </dl>
     *
     * If this is a page iterator, it must contain the 'pagename' entry --- the others
     * are optional.
     *
     * If this is a version iterator, the 'pagename', 'version', <strong>and</strong> 'versiondata'
     * entries are mandatory.  ('pagedata' is optional.)
     *
     * If this is a link iterator, the 'pagename' is mandatory, 'linkrelation' is optional.
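     *
     * Typical consumption sketch (hypothetical caller code):
     * <pre>
     *   while ($record = $iter->next()) {
     *       // ... use $record['pagename'] etc. ...
     *   }
     *   $iter->free();
     * </pre>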
     */
    function next() {
        trigger_error("virtual", E_USER_ERROR);
    }

    function count() {
    	if (!empty($this->_pages))
	    return count($this->_pages);
	else
	    return 0;    
    }

    function asArray() {
    	if (!empty($this->_pages)) {
            reset($this->_pages);
            return $this->_pages;
    	} else {
    	    $result = array();
    	    while ($page = $this->next())
    	        $result[] = $page;
            return $result;
    	}
    }

    /**
     * Release resources held by this iterator.
     */
    function free() {
    }
};

/**
 * search baseclass, pcre-specific
 */
class WikiDB_backend_search
{
    function WikiDB_backend_search($search, &$dbh) {
        $this->_dbh = $dbh;
        $this->_case_exact = $search->_case_exact;
        $this->_stoplist   =& $search->_stoplist;
        $this->stoplisted = array();
    }
    function _quote($word) {
        return preg_quote($word, "/");
    }
    //TODO: use word anchors
    function EXACT($word) { return "^".$this->_quote($word)."$"; }
    function STARTS_WITH($word) { return "^".$this->_quote($word); }
    function ENDS_WITH($word) { return $this->_quote($word)."$"; }
    function WORD($word) { return $this->_quote($word); }
    function REGEX($word) { return $word; }
    //TESTME
    function _pagename_match_clause($node) {
        $method = $node->op;
        $word = $this->$method($node->word);
        return "preg_match(\"/\".$word.\"/\"".($this->_case_exact ? "i":"").")";
    }
    /* Eliminate stoplist words.
     *  Keep a list of Stoplisted words to inform the poor user. 
     */
    function isStoplisted ($node) {
    	// check only on WORD or EXACT fulltext search
    	if ($node->op != 'WORD' and $node->op != 'EXACT')
    	    return false;
        if (preg_match("/^".$this->_stoplist."$/i", $node->word)) {
            array_push($this->stoplisted, $node->word);
            return true;
        }
        return false;
    }
    function getStoplisted($word) {
        return $this->stoplisted;
    }
}

/**
 * search baseclass, sql-specific
 */
class WikiDB_backend_search_sql extends WikiDB_backend_search
{
    function _pagename_match_clause($node) {
        // word already quoted by TextSearchQuery_node_word::_sql_quote()
        $word = $node->sql();
        if ($word == '%') // ALL shortcut
            return "1=1";
        else
            return ($this->_case_exact 
                    ? "pagename LIKE '$word'"
                    : "LOWER(pagename) LIKE '$word'");
    }
    function _fulltext_match_clause($node) {
        // force word-style %word% for fulltext search
        $word = '%' . $node->_sql_quote($node->word) . '%';
        // eliminate stoplist words
        if ($this->isStoplisted($node))
            return "1=1";  // and (pagename or 1) => and 1
        else
            return $this->_pagename_match_clause($node)
                // probably convert this to MATCH AGAINST or SUBSTR/POSITION without wildcards
                . ($this->_case_exact ? " OR content LIKE '$word'" 
                                      : " OR LOWER(content) LIKE '$word'");
    }
}

// $Log: backend.php,v $
// Revision 1.33  2007/06/07 21:35:04  rurban
// fixed backend asArray access to iterators (DebugInfo with SQL)
//
// Revision 1.32  2007/02/17 14:14:41  rurban
// enforce accesslog types
//
// Revision 1.31  2007/01/28 22:49:55  rurban
// use backend specific SQL write_accesslog
//
// Revision 1.30  2007/01/02 13:20:26  rurban
// added link_search. Clarify API: sortby,limit and exclude are strings.
//

// For emacs users
// Local Variables:
// mode: php
// tab-width: 8
// c-basic-offset: 4
// c-hanging-comment-ender-p: nil
// indent-tabs-mode: nil
// End:
?>