path: root/mail/e-searching-tokenizer.c
author     Matthew Barnes <mbarnes@src.gnome.org>   2009-01-12 12:12:01 +0800
committer  Matthew Barnes <mbarnes@src.gnome.org>   2009-01-12 12:12:01 +0800
commit     c7d3c9f95609123035ebaa267f9d2e6ecfa8c2e8 (patch)
tree       246bdb714e24e1b0c9a8ce4a3e45a46b230316de /mail/e-searching-tokenizer.c
parent     f8b33bc4ebe9dd8043674141b5fe4660efaa99e8 (diff)
Merge revisions 36866:37046 from trunk.
svn path=/branches/kill-bonobo/; revision=37050
Diffstat (limited to 'mail/e-searching-tokenizer.c')
-rw-r--r--  mail/e-searching-tokenizer.c  |  6
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/mail/e-searching-tokenizer.c b/mail/e-searching-tokenizer.c
index 044c3840b7..172a126c92 100644
--- a/mail/e-searching-tokenizer.c
+++ b/mail/e-searching-tokenizer.c
@@ -45,7 +45,7 @@ enum {
 static guint signals[LAST_SIGNAL] = { 0, };
-static void e_searching_tokenizer_begin (HTMLTokenizer *, char *);
+static void e_searching_tokenizer_begin (HTMLTokenizer *, const char *);
 static void e_searching_tokenizer_end (HTMLTokenizer *);
 static char *e_searching_tokenizer_peek_token (HTMLTokenizer *);
 static char *e_searching_tokenizer_next_token (HTMLTokenizer *);
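
This first hunk changes only the forward declaration; the matching definition is updated in the last hunk below. Const-qualifying the content-type argument records that the tokenizer only reads it, so callers may pass string literals or borrowed strings without a cast. An illustrative call under that reading (the call site itself is hypothetical, not part of this diff):

	/* Hypothetical call site; t is any HTMLTokenizer, as in the
	 * declarations above.  A string literal is safe to pass now
	 * that the parameter is const char *. */
	e_searching_tokenizer_begin (t, "text/html; charset=utf-8");
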
@@ -811,7 +811,7 @@ searcher_next_token(struct _searcher *s)
 	s->current = token = (struct _token *)e_dlist_remhead(&s->output);
-	return token?token->tok:NULL;
+	return token ? g_strdup (token->tok) : NULL;
 }
 static char *
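
The g_strdup() change alters ownership of the return value: instead of a pointer into the searcher's current token, which is only valid until the searcher advances, the caller now receives its own copy and is responsible for freeing it. A hypothetical caller sketch under that assumption (consume_tokens is not a function in this file):

	static void
	consume_tokens (struct _searcher *s)
	{
		char *tok;

		/* Each returned token is now a caller-owned copy. */
		while ((tok = searcher_next_token (s)) != NULL) {
			/* ... use the token ... */
			g_free (tok);
		}
	}
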
@@ -1070,7 +1070,7 @@ static char *get_token(HTMLTokenizer *t)
 }
 static void
-e_searching_tokenizer_begin (HTMLTokenizer *t, char *content_type)
+e_searching_tokenizer_begin (HTMLTokenizer *t, const char *content_type)
 {
 	ESearchingTokenizer *st = E_SEARCHING_TOKENIZER (t);
 	struct _ESearchingTokenizerPrivate *p = st->priv;
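
One caveat that follows from the const-qualified definition: content_type is borrowed from the caller, so if the tokenizer ever needed to remember it past this call it would have to take its own copy rather than store the pointer. A minimal sketch of that pattern, assuming a hypothetical content_type field in the private struct (no such field appears in this diff):

	/* Hypothetical: keep a private copy instead of the borrowed pointer. */
	g_free (p->content_type);
	p->content_type = g_strdup (content_type);
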