# Derives a URL-safe permalink from +name+ and stores it in +permalink+.
#
# Steps: downcase, transliterate common accented Latin characters to
# ASCII, convert quotes / whitespace / underscores / any other non-word
# characters into hyphens, collapse hyphen runs, and trim hyphens from
# both ends.
#
# @return [String] the permalink that was assigned
def tokenize
  result = self.name.downcase
  # Transliterate accented characters to ASCII. Downcasing first means a
  # single lowercase table covers both cases (the original uppercase
  # regex table was mojibake-corrupted and matched nothing useful).
  # String#tr is the idiomatic (and faster) tool for 1:1 replacement.
  result = result.tr('áàäâåãéèëêíìïîóòöôõúùüûýÿñç',
                     'aaaaaaeeeeiiiiooooouuuuyync')
  result.gsub!(/['"]/, '-')   # quotes become separators
  result.gsub!(/[\s_]+/, '-') # whitespace and underscores -> hyphen
  # All remaining non-word characters become hyphens. The original
  # /W+/ was missing its backslash and matched the literal letter "W".
  result.gsub!(/\W+/, '-')
  result.gsub!(/-{2,}/, '-')  # collapse runs of hyphens
  # Trim leading/trailing hyphens (original /-Z/ matched literal "-Z";
  # the intent was /-\Z/).
  result = result.gsub(/\A-+|-+\z/, '')
  self.permalink = result
end