Revision: cd9d94ebdd08
Branch: default
Author: Michael Gasser <
gas...@cs.indiana.edu>
Date: Wed Feb 19 07:17:01 2014 UTC
Log: L3Lite: groups (MWEs), a few UI functions.
http://code.google.com/p/hltdi-l3/source/detail?r=cd9d94ebdd08
Added:
/l3lite/ui.py
Modified:
/l3lite/__init__.py
/l3lite/entry.py
/l3lite/language.py
/l3xdg/dimension.py
/l3xdg/languages/es/a_chunk.inst
/l3xdg/languages/es/chunk.yaml
/lite.py
=======================================
--- /dev/null
+++ /l3lite/ui.py Wed Feb 19 07:17:01 2014 UTC
@@ -0,0 +1,64 @@
+#
+# L3Lite UI: initial attempt at a user interface for creating languages
+#
+########################################################################
+#
+# This file is part of the HLTDI L^3 project
+# for parsing, generation, translation, and computer-assisted
+# human translation.
+#
+# Copyright (C) 2014, HLTDI <
gas...@cs.indiana.edu>
+#
+# This program is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as
+# published by the Free Software Foundation, either version 3 of
+# the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <
http://www.gnu.org/licenses/>.
+#
+# =========================================================================
+
+# 2014.02.15
+# -- Created
+
+from .language import *
+import os
+
+LANGUAGE_DIR = os.path.join(os.path.dirname(__file__), 'languages')
+
+# Later a UI class? Subclass of tkinter Frame?
+
def load_language():
    """Prompt the user for a language abbreviation and load its .lg file.

    Looks for <abbrev>.lg under LANGUAGE_DIR and returns the Language,
    or None when no such file exists."""
    abbrev = input("Give abbreviation for language.\n>> ")
    lg_path = os.path.join(LANGUAGE_DIR, '{}.lg'.format(abbrev))
    print(lg_path)
    try:
        language = Language.read(lg_path)
    except IOError:
        print("That language doesn't seem to exist.")
        return None
    return language
+
def add_word(language):
    """Prompt for a word and add it to language's lexicon.

    If an entry with that form already exists, ask before adding another.
    Returns the new entry, or None if the user declines."""
    word = input("Write the word to be added to the lexicon.\n>> ")
    if word not in language.words:
        return add_word1(word, language)
    response = input("There's already a word with that form in the lexicon; add another? ")
    # An empty response counts as "yes".
    if response and response[0].lower() != 'y':
        return None
    return add_word1(word, language)
+
def add_word1(word, language):
    """Optionally ask for a class for word, then add it to the lexicon.

    Returns the entry created by language.add_word()."""
    cls = None
    response = input("Do you want to assign a class to the word? ")
    # An empty response counts as "yes".
    if not response or response[0].lower() == 'y':
        choices = ' '.join(list(language.classes.keys()))
        cls = input("Choose from these classes:\n{}\n>> ".format(choices))
    return language.add_word(word, cls=cls)
+
=======================================
--- /l3lite/__init__.py Mon Feb 10 21:51:55 2014 UTC
+++ /l3lite/__init__.py Wed Feb 19 07:17:01 2014 UTC
@@ -1,5 +1,5 @@
"""Do-it-yourself L3. Create simple bilingual lexicons and grammars for
language pairs."""
-__all__ = ['language']
+__all__ = ['language', 'entry', 'ui']
-from .language import *
+from .ui import *
=======================================
--- /l3lite/entry.py Mon Feb 10 21:51:55 2014 UTC
+++ /l3lite/entry.py Wed Feb 19 07:17:01 2014 UTC
@@ -27,34 +27,68 @@
# 2014.02.10
# -- Created
# Possible subclasses: Lex (word, lexeme, class), Gram
+# 2014.02.12
+# -- Inheritance (class to word/lexeme): completed except for government.
+# (But possible conflicts in order are not handled yet.)
+# 2014.02.15
+# -- Methods for making dicts from entries and entries from dict, used
+# in serialization.
+# 2014.02.16-18
+# -- Class for groups (multi-word expressions).
+# 2014.02.18
+# -- Cloning of Lex instances (for groups and L3 nodes).
+
+import copy
+import yaml
class Entry:
"""The central unit in L3Lite, containing lexical and grammatical
constraints that are instantiated during processing."""
- ID = 0
+ ID = 1
- def __init__(self, name, language, cls=None):
- """Initialize name and features:
- trans, depsin, depsout, grams, agr, gov, order, class, count."""
+ def __init__(self, name, language, id=0):
+ """Initialize name and basic features: language, trans, count,
id."""
self.name = name
self.language = language
- self.depsin = None
- self.depsout = None
self.trans = None
- self.order = None
- self.agr = None
-
self.gov = None
- self.grams = None
- self.cls = cls
self.count = 1
-
self.id = Entry.ID
- Entry.ID += 1
+ if id:
+
self.id = id
+ else:
+
self.id = Entry.ID
+ Entry.ID += 1
def __repr__(self):
"""Print name."""
return '<{}:{}>'.format(
self.name,
self.id)
+ ## Serialization
+
+ def to_dict(self):
+ """Convert the entry to a dictionary to be serialized in a yaml
file."""
+ d = {}
+ d['name'] =
self.name
+# d['language'] = self.language
+ d['count'] = self.count
+ if self.trans:
+ d['trans'] = self.trans
+ d['id'] =
self.id
+ return d
+
+ @staticmethod
+ def from_dict(d, language):
+ """Convert a dict (loaded from a yaml file) to an Entry object."""
+ e = Entry(d.get('name'), language)
+ e.count = d.get('count', 1)
+
e.id = d.get('id', 1)
+ e.trans = d.get('trans')
+ return e
+
+ def update_count(self, count=1):
+ """Update count on the basis of data from somewhere."""
+ self.count += count
+
### Translations (word, gram, lexeme entries)
def get_trans(self, language, create=False):
@@ -70,7 +104,7 @@
"""Add translation to the translation dictionary dictionary for
language,
initializing its count."""
transdict = self.get_trans(language, create=True)
- transdict[trans] = count
+ transdict[trans] = {'count': count}
def update_trans(self, language, trans, count=1):
    """Update the count of translation, adding count to its stored tally.

    Raises EntryError if trans has not been added for language."""
    # NOTE(review): this first line is outside the visible hunk context;
    # reconstructed from add_trans/get_trans — confirm against the file.
    transdict = self.get_trans(language)
    if trans not in transdict:
        s = "Attempting to update non-existent translation {} for {}"
        raise(EntryError(s.format(trans, self.name)))
    # add_trans stores each translation as {'count': n}; bump that counter.
    # The original `transdict[trans][count] += count` indexed by the *value*
    # of count (KeyError on first use) instead of the 'count' key.
    transdict[trans]['count'] += count
+
+ def add_trans_dep(self, language, trans, typ, src_dep, targ_dep):
+ """Add a translation dependency specification.
+ typ is iso(morphic), reverse, sepheads (separate heads).
+ src_dep is the label on the source language dependency.
+ targ_dep is the label on the target language dependency."""
+
+class Lex(Entry):
+
+ cloneID = 1
+
+ def __init__(self, name, language, cls=None, id=0):
+ """In addition to Entry features, initialize
+ depsin, depsout, order, agr, gov, grams, and (for word and lexeme)
class."""
+ Entry.__init__(self, name, language, id=id)
+ self.depsin = None
+ self.depsout = None
+ self.order = None
+ self.agr = None
+
self.gov = None
+ self.grams = None
+ self.cls = cls
+ self.cloneID = 0
+
+ def __repr__(self):
+ """Print name."""
+ return '<{}:{}{}>'.format(
self.name,
self.id, ';' +
str(self.cloneID) if self.cloneID else '')
+
+ ## Cloning
+ ## Needed for groups, which consist of copies of lexes and
+ ## for L3 node entries
+
+ def clone(self):
+ copied = Lex(
self.name, self.language, cls=self.cls, id=
self.id)
+ copied.depsin = self.depsin
+ copied.depsout = self.depsout
+ copied.order = self.order
+ copied.agr = self.agr
+
copied.gov =
self.gov
+ copied.grams = self.grams
+ copied.cloneID = Lex.cloneID
+ Lex.cloneID += 1
+ return copied
+
+ ## Serialization
+
+ def to_dict(self):
+ """Convert the lex to a dictionary to be serialized in a yaml
file."""
+ d = Entry.to_dict(self)
+ if self.depsin:
+ d['depsin'] = copy.deepcopy(self.depsin)
+ if self.depsout:
+ d['depsout'] = copy.deepcopy(self.depsout)
+ if self.order:
+ d['order'] = copy.deepcopy(self.order)
+ if self.agr:
+ d['agr'] = copy.deepcopy(self.agr)
+ if
self.gov:
+ d['gov'] = copy.deepcopy(
self.gov)
+ if self.grams:
+ d['grams'] = self.grams.copy()
+ if self.cls:
+ d['cls'] = self.cls
+ return d
+
+ @staticmethod
+ def from_dict(d, language):
+ """Convert a dict (loaded from a yaml file) to a Lex object."""
+ l = Lex(d.get('name'), language, cls=d.get('cls'))
+ if d.get('depsin'):
+ l.depsin = d.get('depsin')
+ if d.get('depsout'):
+ l.depsout = d.get('depsout')
+ if d.get('order'):
+ l.order = d.get('order')
+ if d.get('agr'):
+ l.agr = d.get('agr')
+ if d.get('gov'):
+
l.gov = d.get('gov')
+ if d.get('grams'):
+ l.grams = d.get('grams')
+ return l
## Dependencies (word, lexeme, class entries)
@@ -131,23 +247,25 @@
"""Get the set of order constraint tuples, creating the set if
it's not there
and create is True."""
if self.order is None and create:
- self.order = set()
+ self.order = []
return self.order
def add_order(self, constraint):
"""Add an order constraint tuple to the set of order
constraints."""
order_constraints = self.get_order(create=True)
- order_constraints.add(constraint)
+ order_constraints.append(constraint)
self.language.record_order(constraint)
## Grammatical features associated with words, classes, and lexemes
def get_gram(self, feature, create=False):
- """Get the possible values and their counts for grammatical
feature."""
+ """Get the possible values and their counts for grammatical
feature.
+ If this is a word, the value is a string; if a class or lexeme, a
dict
+ of values and counts."""
if self.grams is None:
self.grams = {}
- if feature not in self.grams and create:
- self.grams[feature] = {}
+# if feature not in self.grams and create:
+# self.grams[feature] = {}
return self.grams.get(feature)
def set_gram(self, feat, values):
@@ -161,6 +279,7 @@
self.grams[feat] = values
def update_gram_value(self, feat, value, count=1):
+ """Add count to the current count for feature value."""
gram = self.get_gram(feat, create=True)
if value in gram:
gram[value] += count
@@ -176,7 +295,7 @@
"""Add an agreement constraint to the list of constraints in the
entry."""
if self.agr is None:
self.agr = []
- self.agr.append((deplabel, head_feat, dep_feat))
+ self.agr.append([deplabel, head_feat, dep_feat])
## A government constraint requires a dependency label, a dependent
feature,
## and a dependent value.
@@ -184,7 +303,208 @@
def add_gov(self, deplabel, dep_feat, dep_value):
if
self.gov is None:
self.gov = []
- self.gov.append((deplabel, dep_feat, dep_value))
+ self.gov.append([deplabel, dep_feat, dep_value])
+
+ ## Inheritance: copying features from classes to lexemes and words
+ ## at initialization
+
def inherit(self):
    """Copy constraints down from this entry's class, if it has one.

    Raises EntryError when the named class is not defined in the language."""
    if not self.cls:
        return
    cls_entry = self.language.get_class(self.cls)
    if not cls_entry:
        s = "Class {} for {} does not exist"
        raise(EntryError(s.format(self.cls, self)))
    # Pull in each kind of constraint in turn.
    for inherit_one in (self.inherit_deps, self.inherit_order,
                        self.inherit_grams, self.inherit_agr,
                        self.inherit_gov):
        inherit_one(cls_entry)
    # Also inherit translation?
+
def inherit_deps(self, cls):
    """Inherit dependency constraints (in and out) from class.

    For each dependency label, class constraints fill in keys the
    word/lexeme does not already set; existing values win on conflict.
    The original duplicated this merge for depsin and depsout; it is
    factored into one helper here."""
    def merge(own, inherited):
        # Returns own unchanged (possibly None) when the class has nothing.
        if not inherited:
            return own
        if own is None:
            own = {}
        for label, cls_constraints in inherited.items():
            if label in own:
                existing = own[label]
                for k, v in cls_constraints.items():
                    # Keep the more specific word/lexeme value on conflict.
                    existing.setdefault(k, v)
            else:
                # NOTE(review): shares the constraint dict with the class
                # entry, exactly as the original did ("Should this be a
                # copy of cls_constraints?" — still unresolved).
                own[label] = cls_constraints
        return own
    self.depsin = merge(self.depsin, cls.depsin)
    self.depsout = merge(self.depsout, cls.depsout)
+
def inherit_order(self, cls):
    """Inherit order constraints from class.

    Appends each class constraint not already present; possible
    conflicts between constraints are not yet resolved."""
    if not cls.order:
        return
    constraints = self.get_order(create=True)
    for constraint in cls.order:
        if constraint not in constraints:
            constraints.append(constraint)
+
def inherit_grams(self, cls):
    """Inherit grammatical features from class.

    A feature already set on the word/lexeme has priority over the
    class value and is left alone."""
    cls_grams = cls.grams
    if cls_grams:
        if self.grams is None:
            self.grams = {}
        for feature, value in cls_grams.items():
            if feature in self.grams:
                # word/lexeme gram has priority over class, so ignore
                continue
            # Copy any other feature/value constraint.
            # (Should the value be a copy??)
            # Fix: the original wrote self.grams[features] — an undefined
            # name, raising NameError on the first inherited feature.
            self.grams[feature] = value
+
def inherit_agr(self, cls):
    """Inherit agreement constraints from class, skipping duplicates."""
    if not cls.agr:
        return
    if self.agr is None:
        self.agr = []
    for constraint in cls.agr:
        if constraint not in self.agr:
            self.agr.append(constraint)
+
+ def inherit_gov(self, cls):
+ """Inherit government constraints from class."""
+
+class Group(Entry):
+ """Multi-word expressions. Each group consists of a head
+ and a set of dependency/dependent pairs, together with
+ an order for the words.
+ Variable slots have dedicated names that allow them to be
+ referenced in translations.
+ Groups must be created *after* other lexical items.
+ For example:
+ read_the_riot_act:
+ [read, {ord: 0,
+ io: [?io, 1],
+ do: [act, {ord: 4, nmod: [riot, {ord: 3}], det: [the, {ord:
2}]}]}]
+ trans {spa: [cantar_las_cuarenta, [[?io, ?oi]]]}
+
+ cantar_las_cuarenta:
+ [cantar, {lexeme: cantar, ord: 0,
+ oi: [?oi],
+ od: [cuarenta, {det: [las]}]}]
+ """
+
def __init__(self, name, language, head, head_feats=None, dependents=None, head_lexeme=False):
    """name of a Group is something like acabar_de_V.
    head is the word that is the syntactic head of the group.

    Raises EntryError when head has no lexical entry in language."""
    Entry.__init__(self, name, language)
    # An index -> [word, {dep: word...}] dict
    self.words = {}
    self.word_id = 0
    self.head_lexeme = bool(head_lexeme)
    if head_lexeme:
        head_type = language.get_lexeme(head)
    else:
        # There may be more than one of these; for now just use the first.
        # Fix: guard an empty result so a missing head raises EntryError
        # below instead of IndexError on [0] (the original indexed first,
        # making its `if not head_type` check unreachable for that case).
        entries = language.get_words(head)
        head_type = entries[0] if entries else None
    if not head_type:
        s = "No existing lexical entry in {} for head of group {}"
        raise(EntryError(s.format(language, name)))
    # The group holds a clone so group-local constraints don't mutate
    # the shared lexical entry.
    self.head = head_type.clone()
    self.words[self.word_id] = [self.head, {}]
    self.word_id += 1
+
+ def __repr__(self):
+ """Print name."""
+ return '<{}:{}>'.format(
self.name,
self.id)
+
+ # Serialization
+
+ def to_dict(self):
+ """Convert the group to a dictionary to be serialized in a yaml
file."""
+ d = Entry.to_dict(self)
+ d['head_lexeme'] = self.head_lexeme
+ d['words'] = {}
+ w = d['words']
+ for index, lex in self.words.items():
+ l = lex[0]
+ name =
l.name
+ w[index] = [name]
+ if len(lex) == 2:
+ w[index].append(copy.deepcopy(lex[1]))
+ return d
+
+ @staticmethod
+ def from_dict(d, language):
+ """Convert a dict (loaded from a yaml file) to a Lex object."""
+ lexeme = d['head_lexeme']
+ g = Group(d.get('name'), language, d.get('words').get(0)[0],
head_lexeme=lexeme)
+ for id, word in d.get('words').items():
+ if id == 0:
+ # Just handle the dependencies for this case
+ deps = word[1]
+ g.words[id][1] = copy.deepcopy(deps)
+ else:
+ name = word[0]
+ lex = language.get_words(name)[0]
+ if len(word) == 2:
+ deps = word[1]
+ lex_info = [lex.clone(), copy.deepcopy(deps)]
+ else:
+ lex_info = [lex.clone()]
+ g.words[id] = lex_info
+ return g
+
def add_word(self, word, head_id, dependency):
    """Add a word to the group, as dependent on dependency from head.

    head_id is the group-internal index of the head word.
    Returns the cloned Lex added to the group.
    Raises EntryError if word has no lexical entry or head_id is unknown."""
    # For now, use first word entry. Guard an empty result so we raise
    # EntryError rather than IndexError (original indexed [0] first).
    entries = self.language.get_words(word)
    typ = entries[0] if entries else None
    if not typ:
        s = "No existing lexical entry in {} for word {} in group {}"
        # Fix: original formatted with the undefined name `language`,
        # which would raise NameError instead of the intended EntryError.
        raise(EntryError(s.format(self.language, word, self)))
    word = typ.clone()
    # Fix: remember the new word's index *before* bumping word_id; the
    # original appended the post-increment id to the head's dependents,
    # pointing one slot past the word just added.
    new_id = self.word_id
    self.words[new_id] = [word]
    self.word_id += 1
    head_list = self.words.get(head_id)
    if not head_list:
        s = "No word in {} with internal ID {}"
        raise(EntryError(s.format(self, head_id)))
    if len(head_list) == 1:
        head_list.append({})
    head, deps = head_list
    if dependency not in deps:
        deps[dependency] = []
    deps[dependency].append(new_id)
    return word
+
def get_lex(self, id):
    """Return the Lex stored under the given group-internal index.

    Raises EntryError when no word has that index."""
    try:
        return self.words[id][0]
    except KeyError:
        s = "{} has no word with index {}"
        raise(EntryError(s.format(self, id)))
class EntryError(Exception):
'''Class for errors encountered when attempting to update an entry.'''
=======================================
--- /l3lite/language.py Mon Feb 10 21:51:55 2014 UTC
+++ /l3lite/language.py Wed Feb 19 07:17:01 2014 UTC
@@ -28,16 +28,21 @@
# -- Created
# 2014.02.10
# -- Made entries a separate class.
+# 2014.02.15
+# -- Methods for serializing and deserializing languages (using YAML).
from .entry import *
+import os, yaml
class Language:
"""Dictionaries of words, lexemes, grammatical features, and
lexical classes."""
+
+ languages = []
def __init__(self,
name, abbrev,
- words=None, lexemes=None, grams=None, classes=None):
+ words=None, lexemes=None, grams=None, classes=None,
groups=None):
"""Initialize dictionaries and names."""
self.name = name
self.abbrev = abbrev
@@ -46,18 +51,93 @@
self.lexemes = lexemes or {}
self.grams = grams or {}
self.classes = classes or {}
+ self.groups = groups or {}
# Record possibilities for dependency labels, feature values,
order constraints
self.possible = {}
+ Language.languages.append(abbrev)
def __repr__(self):
"""Print name."""
return '<<{}>>'.format(
self.name)
+ def to_dict(self):
+ """Convert the language to a dictionary to be serialized as a yaml
file."""
+ d = {}
+ d['name'] =
self.name
+ d['abbrev'] = self.abbrev
+ d['possible'] = self.possible
+ # Entries: each is a dict, whose values must be converted to dicts
+ if self.grams:
+ grams = {}
+ for k, v in self.grams.items():
+ grams[k] = v.to_dict()
+ d['grams'] = grams
+ if self.classes:
+ classes = {}
+ for k, v in self.classes.items():
+ classes[k] = v.to_dict()
+ d['classes'] = classes
+ # Lexemes and words should probably be separate dictionaries (and
files).
+ if self.lexemes:
+ lexemes = {}
+ for k, v in self.lexemes.items():
+ lexemes[k] = v.to_dict()
+ d['lexemes'] = lexemes
+ if self.words:
+ words = {}
+ for k, v in self.words.items():
+ # Words are lists
+ words[k] = [lex.to_dict() for lex in v]
+ d['words'] = words
+ return d
+
+ def write(self, directory, filename=''):
+ """Serialize the language."""
+ filename = filename or self.abbrev + '.lg'
+ path = os.path.join(directory, filename)
+ with open(path, 'w', encoding='utf8') as file:
+ yaml.dump(self.to_dict(), file)
+
+ @staticmethod
+ def from_dict(d):
+ """Convert a dict (loaded from a yaml file) to a Language
object."""
+ l = Language(d.get('name'), d.get('abbrev'))
+ l.possible = d.get('possible')
+ grams = d.get('grams')
+ if grams:
+ l.grams = {}
+ for k, v in grams.items():
+ l.grams[k] = Entry.from_dict(v, l)
+ classes = d.get('classes')
+ if classes:
+ l.classes = {}
+ for k, v in classes.items():
+ l.classes[k] = Lex.from_dict(v, l)
+ lexemes = d.get('lexemes')
+ if lexemes:
+ l.lexemes = {}
+ for k, v in lexemes.items():
+ l.lexemes[k] = Lex.from_dict(v, l)
+ words = d.get('words')
+ if words:
+ l.words = {}
+ for k, v in words.items():
+ l.words[k] = [Lex.from_dict(lex, l) for lex in v]
+ return l
+
@staticmethod
def read(path):
    """Create a Language from the contents of a yaml file, a dict
    that must be then converted to a Language.

    Raises IOError/OSError if path cannot be opened."""
    with open(path, encoding='utf8') as file:
        # safe_load: language files serialize to plain dicts/lists/strings
        # (see to_dict), and bare yaml.load can construct arbitrary Python
        # objects from tagged input.
        dct = yaml.safe_load(file)
    return Language.from_dict(dct)
+
### Basic setters. Create entries (dicts) for item. For debugging
purposes, include name
### in entry.
def add_word(self, word, cls=None):
- entry = Entry(word, self, cls=cls)
+ entry = Lex(word, self, cls=cls)
if word in self.words:
self.words[word].append(entry)
else:
@@ -65,7 +145,7 @@
return entry
def add_lexeme(self, lexeme, cls=None):
- entry = Entry(lexeme, self, cls=cls)
+ entry = Lex(lexeme, self, cls=cls)
if lexeme in self.lexemes:
s = "Lexeme {} already in dictionary"
raise(LanguageError(s.format(lexeme)))
@@ -73,18 +153,25 @@
return entry
def add_class(self, cls):
- entry = Entry(cls, self)
+ entry = Lex(cls, self)
if cls in self.classes:
s = "Class {} already in dictionary"
raise(LanguageError(s.format(cls)))
self.classes[cls] = entry
return entry
+ def add_group(self, name, head, head_lexeme=False):
+ entry = Group(name, self, head, head_lexeme=head_lexeme)
+ if head not in self.groups:
+ self.groups[head] = []
+ self.groups[head].append(entry)
+ return entry
+
def add_gram(self, gram, feature, count=1):
"""A gram, for example, 'plural', must have a feature, for example,
'number'."""
entry = Entry(gram, self)
- if gram in self.lexemes:
+ if gram in self.grams:
s = "Grammatical morpheme {} already in dictionary"
raise(LanguageError(s.format(gram)))
self.grams[gram] = entry
@@ -130,8 +217,9 @@
def record_label(self, label):
    """Record the dependency label in the list of possible labels.

    (Docstring fixed: the structure was changed from a set to a list —
    presumably so it serializes cleanly to YAML; kept duplicate-free
    by the membership check.)"""
    if 'deplabels' not in self.possible:
        self.possible['deplabels'] = []
    if label not in self.possible['deplabels']:
        self.possible['deplabels'].append(label)
def get_possible_labels(self):
return self.possible.get('deplabels')
@@ -142,8 +230,10 @@
def record_order(self, constraint):
"""Record the constraint tuple in the set of possible constraints
for the language."""
if 'order' not in self.possible:
- self.possible['order'] = set()
- self.possible['order'].add(constraint)
+ self.possible['order'] = []
+ if constraint not in self.possible['order']:
+ # Append a *copy* of the constraint list
+ self.possible['order'].append(constraint[:])
def get_possible_orders(self):
return self.possible.get('order')
@@ -154,8 +244,10 @@
"""An agreement constraint is a tuple consisting of
dep label, head feature, dependent feature."""
if 'agr' not in self.possible:
- self.possible['agr'] = set()
- self.possible['agr'].add(constraint)
+ self.possible['agr'] = []
+ if constraint not in self.possible['agr']:
+ # Append a *copy* of the constraint list
+ self.possible['agr'].append(constraint[:])
class LanguageError(Exception):
'''Class for errors encountered when attempting to update the
language.'''
=======================================
--- /l3xdg/dimension.py Mon Feb 10 21:51:55 2014 UTC
+++ /l3xdg/dimension.py Wed Feb 19 07:17:01 2014 UTC
@@ -665,8 +665,6 @@
# For each of out and in there are two set variables, one
(index 0)
# for the indices of the nodes on the other end of links
with the labels
# and one (index 1) for the possible cardinalities of the
first.
- # In fact the latter (the cardinality variables) are only
needed when
- # CS is handled by propagators rather than by projectors.
# But check to see whether these can be determined
variables by looking
# at the cardinality constraints for all entries.
pre_arc = None
@@ -1046,10 +1044,10 @@
# if any([x == None for x in cards]):
# print('cards {}, abbrev {}, node {}, label {},
direction {}'.format(cards, abbrev, node, label, direction))
# Create determined set variables for each cardinality
set if we're doing propagator CS
- if not self.project:
- var_name = '{}{}{}/{}'
- card_det_vars = [DetSVar(var_name.format(label,
cat, node.index, i), c) for i, c in enumerate(cards)]
- main_seqs.append([cardvar, card_det_vars])
+# if not self.project:
+ var_name = '{}{}{}/{}'
+ card_det_vars = [DetSVar(var_name.format(label, cat,
node.index, i), c) for i, c in enumerate(cards)]
+ main_seqs.append([cardvar, card_det_vars])
# Constrain cardinality of the label variable
if not isinstance(labelvar, Determined):
# Propagator CS
@@ -1067,12 +1065,12 @@
## self.add_propagator(lower_cardprop)
## self.add_propagator(upper_cardprop)
# Constrain cardinality with lexical entry selection constraint
- # CardSelection constraints go here, replacing the
IntSelection and CardinalitySubset constraints
- if not self.project: # and not isinstance(lexvar, Determined):
- for cardvar, card_det_vars in main_seqs:
- prop = IntSelection(cardvar, lexvar, card_det_vars,
problem=problem)
+ # With projectors, CardSelection constraints went here,
replacing the IntSelection and CardinalitySubset constraints
+## if not self.project: # and not isinstance(lexvar,
Determined):
+ for cardvar, card_det_vars in main_seqs:
+ prop = IntSelection(cardvar, lexvar, card_det_vars,
problem=problem)
# print('Adding intsel {}'.format(prop))
- self.add_propagator(prop)
+ self.add_propagator(prop)
def _entry_label_card(self, entry, dim_abbrev, label, direction):
"""The possible cardinalities for daughters on arcs with label in
direction for a dimension in an entry.
=======================================
--- /l3xdg/languages/es/a_chunk.inst Sun Oct 27 23:25:16 2013 UTC
+++ /l3xdg/languages/es/a_chunk.inst Wed Feb 19 07:17:01 2014 UTC
@@ -47,6 +47,7 @@
& fuerte pl= fuertes gna=mbarete:adj gnv=mbarete:vc
& fértil pl= fértiles
& general pl= generales
+& grande pl= grandes
& grave pl= graves
& hipócrita pl= hipócritas
& horrible pl= horribles
@@ -154,6 +155,7 @@
& blando fs=blanda mp=blandos fp=blandas
& bonito fs=bonita mp=bonitos fp=bonitas
& bruto fs=bruta mp=brutos fp=brutas
+& bueno fs=buena mp=buenos fp=buenas
& cansado fs=cansada mp=cansados fp=cansadas
& caprichoso fs=caprichosa mp=caprichosos fp=caprichosas
& caro fs=cara mp=caros fp=caras
=======================================
--- /l3xdg/languages/es/chunk.yaml Fri Jan 31 00:54:18 2014 UTC
+++ /l3xdg/languages/es/chunk.yaml Wed Feb 19 07:17:01 2014 UTC
@@ -412,21 +412,21 @@
ins: {preadj: '?', postadj: '?', apred: '?'}
agrs: {pos: 2}
-## Irregular adjectives: buen(o), algun(o), primer(o), tercer(o), ningun(o)
+## Irregular adjectives: buen(o), gran(de), algun(o), primer(o),
tercer(o), ningun(o)
# bueno: pred adj
-- word: bueno
- pos: adj
- syn:
- ins: {apred: '!'}
- agrs: {pos: 2,
- ng: [[0,0]], tam: [[0],[1]], neg: [[0],[1]],
- cont: 0, prf: 0,
- sj: [[1,0,0],[0,1,0],[0,0,0],[1,0,1],[0,1,1],[0,0,1]]}
- cross:
- gn:
- lex: porã:vc
- synsyn:
- agree: [[tmp, tam], [cont, cont], [prf, prf], [neg, neg], [sj, sj]]
+#- word: bueno
+# pos: adj
+# syn:
+# ins: {apred: '!'}
+# agrs: {pos: 2,
+# ng: [[0,0]], tam: [[0],[1]], neg: [[0],[1]],
+# cont: 0, prf: 0,
+# sj: [[1,0,0],[0,1,0],[0,0,0],[1,0,1],[0,1,1],[0,0,1]]}
+# cross:
+# gn:
+# lex: porã:vc
+# synsyn:
+# agree: [[tmp, tam], [cont, cont], [prf, prf], [neg, neg], [sj,
sj]]
# buen: before noun
- word: buen
pos: adj
=======================================
--- /lite.py Mon Feb 10 21:51:55 2014 UTC
+++ /lite.py Wed Feb 19 07:17:01 2014 UTC
@@ -41,21 +41,42 @@
def eg():
e = l3lite.Language('español', 'spa')
+ mm = e.add_word('mujer', cls='sus')
+ e.add_gram('plur', 'num')
+ e.add_gram('sing', 'num')
+ e.add_gram('pres', 'tmp')
+ e.add_gram('pret', 'tmp')
+ mm.add_trans('grn', 'kuña')
+ mm.add_trans('grn', 'kuñakarai')
+ mm.set_gram('num', 'sing')
+ mm.set_gram('gen', 'fem')
+ la = e.add_word('la', 'det')
+ la.set_gram('num', 'sing')
+ la.set_gram('gen', 'fem')
+ ss = e.add_class('sus')
+ ss.add_depin('sj', {'min': 0, 'max': 1, 'dflt': 0})
+ ss.add_depin('oj', {'min': 0, 'max': 1, 'dflt': 0})
+ ss.add_depout('det', {'min': 0, 'max': 1, 'dflt': 1})
+ ss.add_order(['det', '^'])
+ ss.set_gram('num', {'sing': 2, 'plur': 1})
+ ss.set_gram('gen', {'masc': 1, 'fem': 1})
+ ss.add_agr('det', 'gen', 'gen')
+ dd = e.add_class('det')
+ dd.add_depin('det', {'min': 1, 'max': 1})
g = l3lite.Language('guarani', 'grn')
- mm = e.add_word('mujer', cls='sus_fem')
kk = g.add_word('kuña')
- mm.add_trans('grn', 'kuña')
- mm.add_trans('grn', 'kuñakarai')
- e.add_gram('plural', 'número')
- e.add_gram('singular', 'número')
- e.add_gram('presente', 'tiempo')
- e.add_gram('pretérito', 'tiempo')
- mm.add_depin('sj', {'min': 0, 'max': 1, 'dflt': 0})
- mm.add_depin('oj', {'min': 0, 'max': 1, 'dflt': 0})
- mm.add_depout('det', {'min': 0, 'max': 1, 'dflt': 1})
- mm.set_gram('número', {'singular': 1})
- mm.add_order(('det', '^'))
- return e, g
+ la.inherit()
+ mm.inherit()
+ E = l3lite.Language('English', 'eng')
+ rr = E.add_word('read')
+ tt = E.add_word('the')
+ rrr = E.add_word('riot')
+ rrr = E.add_word('act')
+ rra = E.add_group('read_the_riot_act', 'read')
+ rra.add_word('act', 0, 'do')
+ rra.add_word('the', 1, 'det')
+ rra.add_word('riot', 0, 'nmod')
+ return e, g, E
if __name__ == "__main__":
print('L^3 Lite, version {}\n'.format(__version__))