Revision: 376e21f6b3c8
Branch: default
Author: Michael Gasser <gas...@cs.indiana.edu>
Date: Wed May 14 07:28:54 2014 UTC
Log: LGLP paper draft done; still too long though.
http://code.google.com/p/hltdi-l3/source/detail?r=376e21f6b3c8
Modified:
/hiiktuu.py
/hiiktuu/constraint.py
/hiiktuu/cs.py
/hiiktuu/languages/eng.lg
/hiiktuu/sentence.py
/l3xdg/solver.py
/paperdrafts/lglp/hltdi.bib
/paperdrafts/lglp/lglp14.pdf
/paperdrafts/lglp/lglp14.tex
=======================================
--- /hiiktuu.py Sun May 11 05:20:29 2014 UTC
+++ /hiiktuu.py Wed May 14 07:28:54 2014 UTC
@@ -28,7 +28,7 @@
# 2014.02.09
# -- Created
-__version__ = 0.75
+__version__ = 0.8
import hiiktuu
=======================================
--- /hiiktuu/constraint.py Fri May 9 22:41:32 2014 UTC
+++ /hiiktuu/constraint.py Wed May 14 07:28:54 2014 UTC
@@ -51,6 +51,10 @@
# -- Added Order constraint for ordering indices of sequences, replacing multiple
# SetPrecedence constraints, and including one additional condition not in
# SetPrecedence.
+# 2014.05.11
+# -- Complex constraints make selection variables for indices out of the main
+# selection variable (groups in Hiiktuu) non-essential once the constraint
+# is entailed.
from .variable import *
# This is imported in another branch too...
@@ -1591,11 +1595,12 @@
class ComplexConstraint(Constraint):
"""Each value of selection variable (potentially) selects a simple
constraint."""
- def __init__(self, selvar=None, othervars=None,
+ def __init__(self, selvar=None, selvars=None, othervars=None,
problem=None, weight=1, record=True):
- Constraint.__init__(self, [selvar] + othervars, problem=problem, weight=weight,
+ Constraint.__init__(self, [selvar] + selvars + othervars, problem=problem, weight=weight,
record=record)
self.selvar = selvar
+ self.selvars = selvars
self.constraints = []
def fails(self, dstore=None):
@@ -1609,10 +1614,18 @@
def is_entailed(self, dstore=None):
"""Is entailed if all of the constraints indexed by the upper
bound of selvar are entailed."""
- for index in self.selvar.get_upper(dstore=dstore):
+ selvar_upper = self.selvar.get_upper(dstore=dstore)
+ for index in selvar_upper:
constraint = self.constraints[index]
if constraint and not constraint.is_entailed(dstore=dstore):
return False
+ # Remove non-indexed selection variables from essential variable list
+ # if they're there.
+ for index, selvar in enumerate(self.selvars):
+ if index not in selvar_upper:
+ if selvar in dstore.ess_undet:
+# print("Removing {} from DStore essential
variables".format(selvar))
+ dstore.ess_undet.remove(selvar)
return True
def infer(self, dstore=None, verbosity=0, tracevar=[]):
@@ -1646,12 +1659,12 @@
def __init__(self, selvar=None, mainvars=None, seqvars=None, selvars=None,
problem=None, weight=1, record=True):
- ComplexConstraint.__init__(self, selvar, selvars + seqvars + mainvars,
+ ComplexConstraint.__init__(self, selvar, selvars, seqvars + mainvars,
problem=problem, weight=weight,
record=record)
# Constraint.__init__(self, [selvar] + selvars + seqvars + mainvars,
# problem=problem, weight=weight, record=record)
# self.selvar = selvar
- self.selvars = selvars
+# self.selvars = selvars
self.seqvars = seqvars
self.mainvars = mainvars
self.name = '{} = U{} [{}] [[{}]]'.format(Selection.format_seq(mainvars),
@@ -1714,7 +1727,7 @@
the seqvars."""
def __init__(self, selvar=None, convexvars=None, problem=None,
weight=1, record=True):
- ComplexConstraint.__init__(self, selvar, convexvars,
+ ComplexConstraint.__init__(self, selvar, selvars=convexvars, othervars=[],
problem=problem, weight=weight,
record=record)
# Constraint.__init__(self, [selvar] + convexvars, problem=problem, weight=weight,
# record=record)
@@ -2057,7 +2070,7 @@
def __init__(self, selvar=None, featvars=None, selvars=None, seqvars=None,
problem=None, weight=1, record=True):
- ComplexConstraint.__init__(self, selvar, selvars + seqvars + featvars,
+ ComplexConstraint.__init__(self, selvar, selvars, seqvars + featvars,
problem=problem, weight=weight,
record=record)
# Constraint.__init__(self, [selvar] + featvars + selvars + seqvars,
# problem=problem, weight=weight, record=record)
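
The is_entailed() change above prunes per-index selection variables from the store's
set of undetermined essential variables once the main selection variable's upper bound
excludes their index. A minimal self-contained sketch of that pattern; ToyDStore and
the variable names here are illustrative stand-ins, not Hiiktuu's actual API:

    class ToyDStore:
        """Stand-in for a DStore: just the set of essential variables
        that are still undetermined."""
        def __init__(self, essential_undetermined):
            self.ess_undet = set(essential_undetermined)

    def prune_unselected(selvar_upper, selvars, dstore):
        """Once an index has dropped out of the main selection variable's
        upper bound, its selection variable can never be selected, so it
        should no longer block the 'all essentials determined' test."""
        for index, selvar in enumerate(selvars):
            if index not in selvar_upper and selvar in dstore.ess_undet:
                dstore.ess_undet.remove(selvar)

    # Indices 0 and 2 are still selectable; 'b' (index 1) is not.
    dstore = ToyDStore({'a', 'b', 'c'})
    prune_unselected({0, 2}, ['a', 'b', 'c'], dstore)
    assert dstore.ess_undet == {'a', 'c'}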
=======================================
--- /hiiktuu/cs.py Fri May 9 22:41:32 2014 UTC
+++ /hiiktuu/cs.py Wed May 14 07:28:54 2014 UTC
@@ -27,8 +27,11 @@
# 2014.04.26
# -- Created
+# 2014.05.11
+# -- SearchState class created, so that Solver doesn't have to do double-duty.
from .constraint import *
+import queue
class Solver:
"""A solver for a constraint satisfaction problem, actually a state in
the search space."""
@@ -41,23 +44,22 @@
distributable = 3
skipped = 4
- def __init__(self, constraints, dstore, depth=0, name='', parent=None,
+ def __init__(self, constraints, dstore, name='',
description='', verbosity=0):
self.constraints = constraints
self.dstore = dstore
- # Depth of search
- self.depth = depth
# Used in solver's printname
self.description = description
# Solver (state) that generated this one
- self.parent = parent
self.verbosity=verbosity
self.entailed = []
self.failed = []
self.status = Solver.running
self.id = Solver.id
- self.name = name or "({})={}=v{}".format(description, self.id, self.depth)
- self.children = []
+ self.name = name or "({})={}=".format(description, self.id)
+ self.init_state = SearchState(solver=self, dstore=dstore,
+ constraints=constraints,
+ verbosity=verbosity)
Solver.id += 1
def __repr__(self):
@@ -216,17 +218,17 @@
return Superset(variable, v1, problem=self.problem), \
Subset(variable, v2, problem=self.problem)
- def distribute(self, distributor, project=False, verbosity=0):
- """Creates and returns two new solver objects by cloning the
dstore with the distributor."""
+ def distribute(self, state, project=False, verbosity=0):
+ """Creates and returns two new states by cloning the dstore with
the distributor."""
if self.status != Solver.distributable:
return []
- undet = self.dstore.ess_undet
+ undet = state.dstore.ess_undet
if verbosity:
ndet = len(undet)
print('DISTRIBUTION')
print('Distributing, undetermined vars {}'.format(ndet))
for v in list(undet)[:5]:
- v.pprint(dstore=self.dstore)
+ v.pprint(dstore=state.dstore)
if ndet > 5:
print('...')
# Select a variable and two disjoint basic constraints on it
@@ -237,16 +239,101 @@
# The constraints of the selected variable (make copies)
constraints = var.constraints[:]
# Create the new solvers (states), applying one of the constraints to each
- new_dstore1 = self.dstore.clone(constraint1, name=self.name+'a')
- new_dstore2 = self.dstore.clone(constraint2, name=self.name+'b')
+ new_dstore1 = state.dstore.clone(constraint1, name=self.name+'a')
+ new_dstore2 = state.dstore.clone(constraint2, name=self.name+'b')
# Create a new SearchState for each dstore, preserving the accumulated penalty
- solver1 = Solver(self.constraints, new_dstore1,
- name=self.name+'a', depth=self.depth+1,
- parent=self,
- verbosity=verbosity)
- solver2 = Solver(self.constraints, new_dstore2,
- name=self.name+'b', depth=self.depth+1,
- parent=self,
- verbosity=verbosity)
- self.children.extend([solver1, solver2])
- return [((var, constraint2), solver2), ((var, constraint1), solver1)]
+ state1 = SearchState(constraints=state.constraints, dstore=new_dstore1,
+ name=state.name+'a', depth=state.depth+1,
+ parent=state,
+ verbosity=verbosity)
+ state2 = SearchState(constraints=state.constraints, dstore=new_dstore2,
+ name=state.name+'b', depth=state.depth+1,
+ parent=state,
+ verbosity=verbosity)
+ state.children.extend([state1, state2])
+ return [((var, constraint2), state2), ((var, constraint1), state1)]
+
+class SearchState:
+
+ def __init__(self, solver=None, name='', dstore=None,
+ constraints=None, parent=None,
+ depth=0, verbosity=0):
+ self.solver = solver
+ self.name = name
+ self.dstore = dstore
+ self.constraints = constraints
+ self.parent = parent
+ self.children = []
+ # Lists of entailed and failed constraints, used by run_constraints()
+ self.entailed = []
+ self.failed = []
+ self.depth = depth
+ self.verbosity = verbosity
+
+ def __repr__(self):
+ return "<SS {}/{}>".format(
self.name, self.depth)
+
+ def fixed_point(self, awaken, verbosity=0):
+ if verbosity:
+ s = "# constraints to awaken: {}, # variables to determine: {}|
{}"
+ print(s.format(len(awaken), len(self.dstore.ess_undet),
len(self.dstore.undetermined)))
+ if self.dstore.is_determined():
+ # All essential variables are determined
+ self.status = Solver.succeeded
+ return True
+ elif len(awaken) == 0:
+ # More variables to determine; we have to distribute
+ self.status = Solver.distributable
+ return True
+ # Keep propagating
+ return False
+
+ def run(self, verbosity=0, tracevar=[]):
+ """Run the constraints until CS fails or a fixed point is
reached."""
+ if verbosity:
+ s = "Running {} with {}|{} undetermined variables, {}
constraints"
+ print(s.format(self, len(self.dstore.ess_undet),
len(self.dstore.undetermined), len(self.constraints)))
+ awaken = set(self.constraints)
+ it = 0
+ while not self.fixed_point(awaken, verbosity=verbosity):
+ if verbosity:
+ print("Running iteration {}".format(it))
+ awaken = self.run_constraints(awaken, verbosity=verbosity, tracevar=tracevar)
+ it += 1
+
+ def run_constraints(self, constraints, verbosity=0, tracevar=[]):
+ awaken = set()
+ all_changed = set()
+ for constraint in constraints:
+ state, changed_vars = constraint.run(dstore=self.dstore, verbosity=verbosity, tracevar=tracevar)
+ all_changed.update(changed_vars)
+ if state == Constraint.entailed:
+ # Constraint is entailed; add it to the list of those.
+ self.entailed.append(constraint)
+ # Delete it from awaken if it's already there
+ if constraint in awaken:
+ awaken.remove(constraint)
+
+ if state == Constraint.failed:
+ if verbosity:
+ print("FAILED {}".format(constraint))
+ return Constraint.failed
+
+ # Check whether any of the changed vars cannot possibly be determined; if so,
+ # the constraint fails
+ for var in changed_vars:
+ try:
+ var.determined(dstore=self.dstore, verbosity=verbosity)
+ except VarError:
+ if verbosity:
+ print("{} CAN'T BE DETERMINED, SO {} MUST
FAIL".format(var, constraint))
+ return Constraint.failed
+
+ for var in changed_vars:
+ # Add constraints for changed var to awaken unless those constraints are already entailed
+ # or failed
+ update_cons = {c for c in var.constraints if c not in self.entailed and c not in self.failed}
+ if var == tracevar and verbosity:
+ print('Adding {} constraints for changed variable {}'.format(len(update_cons), tracevar))
+ awaken.update(update_cons)
+ if verbosity > 1:
+ print('# changed vars {}'.format(len(all_changed)))
+ return awaken
+
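
The hunk above adds import queue without yet showing a caller. One plausible use,
given the Solver/SearchState split and distribute() returning ((var, constraint), state)
pairs, is a best-first driver over search states. In this sketch value(), succeeded(),
and distribute() are assumed hooks for illustration, not the committed API:

    import itertools, queue

    def best_first(initial_state, value, succeeded, distribute, max_states=1000):
        """Best-first search over SearchStates: expand the state with the
        lowest value first (e.g., fewest undetermined essential variables)."""
        counter = itertools.count()   # tie-breaker; states themselves aren't comparable
        frontier = queue.PriorityQueue()
        frontier.put((value(initial_state), next(counter), initial_state))
        for _ in range(max_states):
            if frontier.empty():
                return None           # search space exhausted
            _, _, state = frontier.get()
            if succeeded(state):      # all essential variables determined
                return state
            # distribute() mirrors the diff's return shape: pairs of
            # ((var, constraint), child_state)
            for _, child in distribute(state):
                frontier.put((value(child), next(counter), child))
        return None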
=======================================
--- /hiiktuu/languages/eng.lg Fri May 9 22:41:32 2014 UTC
+++ /hiiktuu/languages/eng.lg Wed May 14 07:28:54 2014 UTC
@@ -46,10 +46,10 @@
groups:
kick_v:
-# - words: [kick_v, the, bucket]
-# trans:
-# spa:
-# - [estirar_la_pata, {align: [0, 1, 2], agr: [{tns: tmp}, false, false]}]
+ - words: [kick_v, the, bucket]
+ trans:
+ spa:
+ - [estirar_la_pata, {align: [0, 1, 2], agr: [{tns: tmp}, false, false]}]
- words: [kick_v, $concrete]
trans:
spa:
=======================================
--- /hiiktuu/sentence.py Sun May 11 05:20:29 2014 UTC
+++ /hiiktuu/sentence.py Wed May 14 07:28:54 2014 UTC
@@ -49,6 +49,10 @@
# is right: all nodes in outer group before merged node must
# precede all nodes in inner group, and all nodes in outer group
# after merged node must follow all nodes in inner group.
+# 2014.05.11
+# -- Tree variables for unselected groups get removed from essential
+# variable list so the list of undetermined essential variables can
+# end up empty when it should be.
import itertools, copy
# ui.py loads language, etc.
@@ -399,6 +403,8 @@
self.variables['gnodes']]).constraints)
# All snodes must have distinct category nodes
self.constraints.extend(Disjoint([sn.variables['agnodes'] for sn in self.nodes]).constraints)
+ # All snodes must have distinct concrete merged nodes
+ self.constraints.extend(Disjoint([sn.variables['mgnodes'] for sn in self.nodes]).constraints)
# All concrete gnodes must have distinct category nodes
self.constraints.extend(Disjoint([gn.variables['merge_agn'] for gn in self.gnodes]).constraints)
# All position constraints for snodes
@@ -711,7 +717,8 @@
self.variables['cgnodes'] = self.variables['gnodes']
# SNode positions of GNodes for this GInst
self.svar('gnodes_pos', 'g{}->gnodes_pos'.format(self.index),
- set(), set(cand_snodes), self.ngnodes, self.ngnodes, ess=False)
+ set(), set(cand_snodes), self.ngnodes, self.ngnodes)
+ # , ess=True)
# set(), set(range(nsnodes)), self.ngnodes, self.ngnodes)
# SNode positions of abstract GNodes for this GInst
if self.nanodes == 0:
@@ -746,11 +753,15 @@
# Trees under GInst head (including self)
if self.nanodes == 0:
# No abstract gnodes, so same as gnodes
- self.variables['tree'] = self.variables['gnodes_pos']
+ v = self.variables['gnodes_pos']
+ self.variables['tree'] = v
+ # Make this an essential variable
+ v.essential=True
+ self.sentence.dstore.ess_undet.append(v)
else:
self.svar('tree', 'g{}->tree'.format(self.index),
# at least as long as the number of self's nodes
- set(), set(cand_snodes), self.ngnodes, len(cand_snodes), ess=False)
+ set(), set(cand_snodes), self.ngnodes, len(cand_snodes), ess=True)
# set(), set(range(nsnodes)), self.ngnodes, nsnodes, ess=True)
# Determined variable for within-source agreement constraints, gen: 0}
agr = self.get_agr()
=======================================
--- /l3xdg/solver.py Fri May 9 22:41:32 2014 UTC
+++ /l3xdg/solver.py Wed May 14 07:28:54 2014 UTC
@@ -168,8 +168,8 @@
verbosity=verbosity)
def value(self, state):
- """Value for a state. Low number are better.
- Number of determined variables in the state's DStore.
+ """Value for a state. Low numbers are better.
+ Number of undetermined variables in the state's DStore.
"""
# return len(state.dstore.undetermined)
return len(state.dstore.get_undet())
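
The docstring correction above is substantive: value() is used as a cost, so counting
determined variables would have ranked the least-advanced states best. A toy check of
the corrected heuristic, with ToyDStore and ToyState as illustrative stand-ins:

    class ToyDStore:
        def __init__(self, undet):
            self._undet = set(undet)
        def get_undet(self):
            # Undetermined variables in this store
            return self._undet

    class ToyState:
        def __init__(self, dstore):
            self.dstore = dstore

    def value(state):
        # Low numbers are better: states closer to a solution score lower.
        return len(state.dstore.get_undet())

    nearly_solved = ToyState(ToyDStore({'x'}))
    fresh = ToyState(ToyDStore({'x', 'y', 'z'}))
    assert value(nearly_solved) < value(fresh)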
=======================================
--- /paperdrafts/lglp/hltdi.bib Sat May 10 06:08:38 2014 UTC
+++ /paperdrafts/lglp/hltdi.bib Wed May 14 07:28:54 2014 UTC
@@ -183,7 +183,7 @@
year = {2009}
}
-@InProceedings{bojar:04,
+@InProceedings{bojar04,
author = {Ond\v{r}ej Bojar},
title = {Problems of Inducing Large Coverage Constraint-Based
Dependency Grammar},
booktitle = {Constraint Solving and Language Processing, First
International Workshop, {CSLP} 2004},
@@ -680,6 +680,17 @@
year = {1997},
OPTannote = {}
}
+
+@InProceedings{foth+menzel,
+ author = {Kilian Foth and Wolfgang Menzel},
+ title = {Hybrid Parsing: Using Probabilistic Models as
+ Predictors for a Symbolic Parser},
+ booktitle = {Proceedings of the Annual Conference of the Association
+ for Computational Linguistics},
+ year = {2006},
+ address = {Sydney, Australia},
+ annote = {}
+}
@BOOK{fromm:82,
AUTHOR = "H. Fromm",
@@ -1412,16 +1423,6 @@
volume = 5,
pages = "253--263"
}
-
-@ARTICLE{lesser:75,
- AUTHOR = "Victor R. Lesser, R. D. Fennel L.D. Erman and D.R Reddy",
- TITLE = "Organization of the \uppercase{H}earsay-{II} speech
- understanding system",
- JOURNAL = ICASSP,
- YEAR = "1975",
- VOLUME = "23",
- PAGES = "11-23"
-}
@Book{leslau:41,
author = {Wolf Leslau},
@@ -1574,6 +1575,16 @@
pages = "209--228",
volume = "16"
}
+
+@Article{mcshane+nirenburg,
+ author = {Marjorie Mc{S}hane and Sergei Nirenburg and James Cowie and Ron Zacharski},
+ title = {Embedding Knowledge Elicitation and {MT} Systems within a Single Architecture},
+ journal = {Machine Translation},
+ year = {2002},
+ volume = {17},
+ pages = {271--305},
+ annote = {}
+}
@Article{marantz,
author = "Alec Marantz",
@@ -2149,6 +2160,15 @@
volume = "26",
pages = "113--122"
}
+
+@InProceedings{wang+harper,
+ author = {Wen Wang and Mary Harper},
+ title = {A Statistical Constraint Dependency Grammar ({CDG}) Parser},
+ year = {2004},
+ booktitle = {Proceedings of ACL04 Incremental Parsing Workshop},
+ address = {Barcelona, Spain},
+ annote = {}
+}
@Book{tamene,
author = {Tamene Bitima},
=======================================
--- /paperdrafts/lglp/lglp14.pdf Sun May 11 05:20:29 2014 UTC
+++ /paperdrafts/lglp/lglp14.pdf Wed May 14 07:28:54 2014 UTC
Binary file, no diff available.
=======================================
--- /paperdrafts/lglp/lglp14.tex Sun May 11 05:20:29 2014 UTC
+++ /paperdrafts/lglp/lglp14.tex Wed May 14 07:28:54 2014 UTC
@@ -148,10 +148,13 @@
will facilitate the documentation process.
We are particularly interested in MT and CAT and the grammars and lexica
that they require.
+We focus on MT and CAT because for most of the languages in question, the lack of
+linguistic resources correlates with a lack of written material in the language, and
+we would like to develop tools to aid human translators, including non-professional ones,
+in translating documents into these languages.
Our long-term goal is a system that allows naive users to write bilingual lexicon-grammars
for low-resource languages that can also be updated on the basis of monolingual and bilingual corpora,
-to the extent these are available.
-Our long-term goals are most similar to those of the Apertium \cite{apertium} project.
+to the extent these are available, and that can be easily integrated into a CAT system.
In this paper we describe the initial steps in developing
Hiiktuu,\footnote{\textit{Hiiktuu} is the Oromo
word for a (female) translator.} a lexical-grammatical framework for MT
and CAT.
@@ -213,8 +216,7 @@
- words: [the, end, of, the, world]
trans:
spa:
- - [el_fin_del_mundo,
- {align: [1,2,3,0,4]}]
+ - [el_fin_del_mundo, {align: [1,2,3,0,4]}]
\end{verbatim}
\normalsize
%\end{spacing}
@@ -280,8 +282,8 @@
pass:
- root: pass_v, features: {prs: 1, tns: prs}
- root: pass_v, features: {prs: 2, tns: prs}
+ - root: pass_v, features: {prs: 3, num: plr, tns: prs}
- root: pass_n, features: {num: sng}
- - root: pass_v, features: {prs: 3, num: plr, tns: prs}
passes:
root: pass_v, features: {prs: 3, num: sng, tns: prs}
passed:
@@ -389,8 +391,8 @@
Since the group contains no subject, we constrain it to agree with the person and number
of the verb.
Thus the entry for this group also contains the agreement attribute:
-\texttt{agr: [[1, 6, [prs, prs], [num, num]]]}.
-This states that the sixth element agrees with the first on person and number features.
+\texttt{agr: [[2, 6, [prs, prs], [num, num]]]}.
+This states that the sixth element must agree with the second on person and number features.
\section{Constraint satisfaction and translation}
\label{sect:cs}
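
As a cross-check on the agr notation in the hunk above: [[2, 6, [prs, prs], [num, num]]]
requires element 6 of the group to match element 2 on the prs and num features. A toy
rendering of that reading in Python; the function and data names are illustrative only:

    def agrees(group_feats, agr):
        """group_feats maps an element index to its feature dict;
        agr is a list of [src, tgt, [f_src, f_tgt], ...] tuples."""
        for src, tgt, *pairs in agr:
            for f_src, f_tgt in pairs:
                if group_feats[src].get(f_src) != group_feats[tgt].get(f_tgt):
                    return False
        return True

    feats = {2: {'prs': 3, 'num': 'plr'}, 6: {'prs': 3, 'num': 'plr'}}
    assert agrees(feats, [[2, 6, ['prs', 'prs'], ['num', 'num']]])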
@@ -437,21 +439,93 @@
\section{Related work}
\label{sect:related}
-... Apertium, MOLTO?, other grammatical formalisms? ...
+Our goals are most similar to those of the Apertium \cite{apertium} project.
+As with Apertium, we are developing open-source, mostly rule-based systems for MT.
+Also in common with Apertium, we work within the framework of relatively shallow, chunk grammars.
+We differ mainly in our concern for flexibility, robustness, and transparency.
+We are willing to sacrifice linguistic coverage and parsimony to achieve these goals.
+We accommodate a range of lexical/grammatical possibilities, from the completely
+lexical on the one extreme to phrasal units consisting of a single lexeme and one or
+more syntactic/semantic categories on the other, and we are not so concerned that
+Hiiktuu grammars will accept many ungrammatical source-language sentences or even
+that they will output ungrammatical (along with grammatical) target-language
+translations.
-\section{Status of project}
+With respect to our long-term goals, Hiiktuu also resembles the Expedition
+project \cite{mcshane+nirenburg}, which aims to make use of knowledge acquisition
+techniques and naive monolingual informants
+in the development of rule-based MT systems that translate low-resource source
+languages into English.
+Although it is likely we will make use of some of the insights of Expedition,
+our project differs first, in assuming bilingual informants and second, in aiming to
+develop systems that are unrestricted with respect to target language.
+In fact we are more interested in MT systems with low-resource languages as target
+languages because of the lack of documents in such languages.
+
+Although we would not want Hiiktuu to be taken seriously as a linguistic theory,
+it is worth mentioning which theories it has the most in common with.
+Like Construction Grammar \cite{steels} and Frame Semantics \cite{fillmoreFS},
+it treats linguistic knowledge as essentially phrasal.
+Hiiktuu belongs to the family of dependency grammar (DG) theories because the heads
+of its phrasal units are words or lexemes rather than non-terminals.
+It has the most in common with those computational DG theories that parse sentences
+using constraint satisfaction \cite{bojar04,debusmann,foth+menzel,wang+harper}.
+However, in its current version, it remains an extremely primitive form of dependency
+grammar, permitting only flat structures with unlabeled arcs and no relations between
+groups other than through the merge operation described in \ref{subsect:cats}.
+This means that complex grammatical phenomena such as long-distance dependencies and
+word-order variability can only be captured through specific groups.
+We prefer this approach because it remains easier for non-linguists to understand.
+
+Finally, though we have not yet looked into the details,
+the theory's relative simplicity and flexibility should allow it to be converted
+to other more elaborate formalisms, for example, synchronous context-free grammars.
+
+\section{Status of project, ongoing and future work}
\label{sect:status}
-... tiiiiny lexicons
-\section{Ongoing and future work}
-\label{sect:future}
+Hiiktuu is written in Python;
+the code and implemented lexical/grammatical examples
+are available at [URL omitted from submission to preserve anonymity]
+under the GPL license.
+To date, we have only tested the framework on a limited number of Amharic-to-Oromo
+translations.
+In order to develop a more complete lexicon/grammar for this language pair and others,
+we are currently working on methods for automatically extracting groups from
+dictionaries in various formats and from the limited bilingual data that
+are available.
+We are also implementing a GUI that will allow naive bilingual users to
+create Hiiktuu entries.
+For the longer term, our goal is tools for intelligent elicitation of lexical entries
+[ref to nirenburg].
-... (G)UI for creating lexica, elicitation of lexica ...
-... between-group dependencies ...
-... application to Spa-Grn, Amh-Orm ...
+As far as the grammatical framework is concerned, as noted above,
+the lack of dependencies between the heads of groups leaves the system
+without the capacity to represent agreement constraints, for example, agreement
+between a verb+object group and a group representing the verb's subject,
+or major constituent order differences between source and target language.\footnote{
+The only way to implement such constraints in the current version of Hiiktuu
+is through larger groups that incorporate, for example, subjects in verb-headed
+groups, as in \textit{\$sbd kick\_v \$sth}.}
+To alleviate this problem, we will be implementing the possibility
+of dependencies between the chunks that are assigned to groups, much as in the
+``interchunk module'' of Apertium.
\section{Conclusions}
\label{sect:conclusions}
+
+Relatively sophisticated computational grammars, parsers, and/or generators
+exist for perhaps a dozen languages, and usable machine translation
systems exist for
+at most dozens of pairs of languages.
+This leaves the great majority of languages and the communities who speak
them
+relatively even more disadvantaged than they were before the digital
revolution.
+What is called for are methods that can be quickly and easily deployed to
+begin to record the grammars and lexical of these languages and to use
these
+tools for the benefit of the linguistic communities, in part to aid in the
translation
+of documents into the languages.
+
+The Hiiktuu project is designed with these needs in mind.
+Though far from achieving our ultimate goals, we have developed a simple, flexible,
+and robust framework for bilingual lexicon-grammars and MT/CAT that we hope will be
+a starting point for a large number of under-resourced languages.
% include your own bib file like this:
%\bibliographystyle{acl}