File: matching.c

package info (click to toggle)
pg-similarity 1.0-9
  • links: PTS, VCS
  • area: main
  • in suites: forky, sid
  • size: 576 kB
  • sloc: ansic: 3,257; sql: 517; makefile: 25; sh: 1
file content (162 lines) | stat: -rw-r--r-- 3,451 bytes parent folder | download | duplicates (2)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
/*----------------------------------------------------------------------------
 *
 * matching.c
 *
 * The Matching Coefficient is a simple vector based approach
 *
 *          nt
 * s = -------------
 *      max(nx, ny)
 *
 * where nt is the number of common n-grams found in both strings, nx is the
 * number of n-grams in x and, ny is the number of n-grams in y.
 *
 * For example:
 *
 * x: euler = {e, u, l, e, r}
 * y: heuser = {h, e, u, s, e, r}
 *
 *      4
 * s = --- = 0.666...
 *      6
 *
 * Note: here "n-gram" means either (i) a sequence of n letters or
 * (ii) a sequence of n words.
 *
 *
 * Copyright (c) 2008-2018, Euler Taveira de Oliveira
 *
 *----------------------------------------------------------------------------
 */

#include "similarity.h"
#include "tokenizer.h"


/* GUC variables */

/* which tokenizer splits the input strings; one of the PGS_UNIT_* codes
 * handled in matchingcoefficient() (default: split on non-alphanumerics) */
int		pgs_matching_tokenizer = PGS_UNIT_ALNUM;
/* similarity cutoff used by the boolean operator matchingcoefficient_op() */
double	pgs_matching_threshold = 0.7;
/* when true, divide the common-token count by max(nx, ny); when false,
 * return the raw common-token count */
bool	pgs_matching_is_normalized = true;

PG_FUNCTION_INFO_V1(matchingcoefficient);

/*
 * matchingcoefficient
 *
 * SQL-callable function: computes the Matching Coefficient between the two
 * text arguments.  Each string is split into tokens according to the
 * pgs_matching_tokenizer GUC; the result is
 *
 *     nt / max(nx, ny)        (when pgs_matching_is_normalized)
 *     nt                      (otherwise)
 *
 * where nt is the number of tokens of x also found in y (duplicates in x
 * are counted each time they match), and nx, ny are the token counts.
 *
 * Raises ERROR (ERRCODE_INVALID_PARAMETER_VALUE) when either argument is
 * longer than PGS_MAX_STR_LEN bytes.
 */
Datum
matchingcoefficient(PG_FUNCTION_ARGS)
{
	char		*a, *b;
	TokenList	*s, *t;
	Token		*p, *q;
	int		atok, btok, comtok, maxtok;
	float8		res;

	/* C-string copies of the text arguments (palloc'd; freed with the
	 * per-call memory context) */
	a = DatumGetPointer(DirectFunctionCall1(textout, PointerGetDatum(PG_GETARG_TEXT_P(0))));
	b = DatumGetPointer(DirectFunctionCall1(textout, PointerGetDatum(PG_GETARG_TEXT_P(1))));

	if (strlen(a) > PGS_MAX_STR_LEN || strlen(b) > PGS_MAX_STR_LEN)
		ereport(ERROR,
				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
				errmsg("argument exceeds the maximum length of %d bytes",
					PGS_MAX_STR_LEN)));

	/* token lists (duplicates are kept; see formula above) */
	s = initTokenList(0);
	t = initTokenList(0);

	switch (pgs_matching_tokenizer)
	{
		case PGS_UNIT_WORD:
			tokenizeBySpace(s, a);
			tokenizeBySpace(t, b);
			break;
		case PGS_UNIT_GRAM:
			tokenizeByGram(s, a);
			tokenizeByGram(t, b);
			break;
		case PGS_UNIT_CAMELCASE:
			tokenizeByCamelCase(s, a);
			tokenizeByCamelCase(t, b);
			break;
		case PGS_UNIT_ALNUM:
		default:
			tokenizeByNonAlnum(s, a);
			tokenizeByNonAlnum(t, b);
			break;
	}

	elog(DEBUG3, "Token List A");
	printToken(s);
	elog(DEBUG3, "Token List B");
	printToken(t);

	atok = s->size;
	btok = t->size;
	maxtok = max2(atok, btok);

	comtok = 0;

	/*
	 * Count, for each token of s, whether it occurs somewhere in t.
	 * O(nx * ny) linear scan.
	 *
	 * XXX consider sorting s and t when we're dealing with large lists?
	 */
	p = s->head;
	while (p != NULL)
	{
		int found = 0;

		q = t->head;
		while (q != NULL)
		{
			elog(DEBUG3, "p: %s; q: %s", p->data, q->data);
			if (strcmp(p->data, q->data) == 0)
			{
				found = 1;
				break;
			}
			q = q->next;
		}

		if (found)
		{
			comtok++;
			elog(DEBUG2, "\"%s\" found; comtok = %d", p->data, comtok);
		}

		p = p->next;
	}

	destroyTokenList(s);
	destroyTokenList(t);

	elog(DEBUG1, "is normalized: %d", pgs_matching_is_normalized);
	elog(DEBUG1, "common tokens size: %d", comtok);
	elog(DEBUG1, "maximum token size: %d", maxtok);

	if (pgs_matching_is_normalized)
	{
		/*
		 * Guard the division: when both strings tokenize to nothing
		 * (maxtok == 0) the old code computed 0.0/0.0 and returned NaN.
		 * Two empty token lists are considered identical.
		 */
		if (maxtok == 0)
			res = 1.0;
		else
			res = (float8) comtok / maxtok;
	}
	else
		res = comtok;

	PG_RETURN_FLOAT8(res);
}

PG_FUNCTION_INFO_V1(matchingcoefficient_op);

/*
 * matchingcoefficient_op
 *
 * Boolean operator support: true when the normalized Matching Coefficient
 * of the two text arguments is >= pgs_matching_threshold.
 */
Datum matchingcoefficient_op(PG_FUNCTION_ARGS)
{
	float8	res;

	/*
	 * store *_is_normalized value temporarily 'cause
	 * threshold (we're comparing against) is normalized
	 */
	bool	tmp = pgs_matching_is_normalized;
	pgs_matching_is_normalized = true;

	/*
	 * Restore the saved value even if matchingcoefficient() raises an
	 * error (e.g. the max-length ereport); without the PG_TRY block an
	 * aborted call would leave pgs_matching_is_normalized forced to true
	 * for the rest of the session.
	 */
	PG_TRY();
	{
		res = DatumGetFloat8(DirectFunctionCall2(
						matchingcoefficient,
						PG_GETARG_DATUM(0),
						PG_GETARG_DATUM(1)));
	}
	PG_CATCH();
	{
		pgs_matching_is_normalized = tmp;
		PG_RE_THROW();
	}
	PG_END_TRY();

	/* we're done; back to the previous value */
	pgs_matching_is_normalized = tmp;

	PG_RETURN_BOOL(res >= pgs_matching_threshold);
}