r71838 - in trunk/mcs: gmcs mcs

3 views
Skip to first unread message

Miguel de Icaza (miguel@novell.com)

unread,
Jan 28, 2007, 4:46:11 PM1/28/07
to mono-p...@lists.ximian.com, ximian....@gmail.com, mono-svn-patche...@googlegroups.com
Author: miguel
Date: 2007-01-28 16:46:10 -0500 (Sun, 28 Jan 2007)
New Revision: 71838

Added:
trunk/mcs/mcs/lambda.test
Modified:
trunk/mcs/gmcs/ChangeLog
trunk/mcs/gmcs/Makefile
trunk/mcs/gmcs/cs-parser.jay
trunk/mcs/gmcs/gmcs.exe.sources
trunk/mcs/mcs/ChangeLog
trunk/mcs/mcs/class.cs
trunk/mcs/mcs/cs-parser.jay
trunk/mcs/mcs/cs-tokenizer.cs
trunk/mcs/mcs/driver.cs
trunk/mcs/mcs/report.cs
trunk/mcs/mcs/support.cs
Log:
2007-01-28 Miguel de Icaza <mig...@novell.com>

* lambda.cs: Empty new file, will eventually have the lambda
expression implementation.

* lambda.test: used to test the internal tokenizer.

* report.cs (FeatureIsNotISO1): Rename from
FeatureIsNotStandardized, because it was about the language level
(1 vs 2), not about standardization.

(FeatureRequiresLINQ): New.

* support.cs (SeekableStreamReader): Only require that the reader
is a TextReader, not a StreamReader, so we can plug StringReader.

* cs-tokenizer.cs (parse_type_and_parameter): Returns true if at a
given position in the input stream the following tokens can be
parsed as a type followed by an identifier.

(is_punct): after a '(' if parse_type_and_parameter returns true,
then return a special token OPEN_PARENS_LAMBDA which is used to
avoid reduce/reduce errors in the grammar for the
lambda_expression rules.

(parse_type): implement a type parser inside the
tokenizer, the parser only returns true or false depending on
whether the input at a given position can be parsed as a type.

(peek_token): new method used during type parsing.

2007-01-28 Miguel de Icaza <mig...@novell.com>

* cs-parser.jay: New grammar bits for parsing lambda expressions.


Modified: trunk/mcs/gmcs/ChangeLog
===================================================================
--- trunk/mcs/gmcs/ChangeLog 2007-01-28 21:24:54 UTC (rev 71837)
+++ trunk/mcs/gmcs/ChangeLog 2007-01-28 21:46:10 UTC (rev 71838)
@@ -1,3 +1,7 @@
+2007-01-28 Miguel de Icaza <mig...@novell.com>
+
+ * cs-parser.jay: New grammar bits for parsing lambda expressions.
+
2007-01-28 Raja R Harinath <rhar...@novell.com>

Fix #80534, gtest-309.cs

Modified: trunk/mcs/gmcs/Makefile
===================================================================
--- trunk/mcs/gmcs/Makefile 2007-01-28 21:24:54 UTC (rev 71837)
+++ trunk/mcs/gmcs/Makefile 2007-01-28 21:46:10 UTC (rev 71838)
@@ -96,3 +96,5 @@
-rm -f gmcs3.exe
make gmcs3.exe RUNTIME_FLAGS='--profile=$(PROFILER)'

+typetest:
+ mono gmcs.exe --typetest ../mcs/lambda.test
\ No newline at end of file

Modified: trunk/mcs/gmcs/cs-parser.jay
===================================================================
--- trunk/mcs/gmcs/cs-parser.jay 2007-01-28 21:24:54 UTC (rev 71837)
+++ trunk/mcs/gmcs/cs-parser.jay 2007-01-28 21:46:10 UTC (rev 71838)
@@ -204,6 +204,7 @@
%token WHILE
%token ARGLIST
%token PARTIAL
+%token ARROW

/* C# keywords which are not really keywords */
%token GET "get"
@@ -273,6 +274,7 @@
%token LITERAL_STRING "string literal"

%token IDENTIFIER
+%token OPEN_PARENS_LAMBDA
%token CLOSE_PARENS_CAST
%token CLOSE_PARENS_NO_CAST
%token CLOSE_PARENS_OPEN_PARENS
@@ -344,7 +346,7 @@
if (s != "alias"){
Report.Error (1003, lt.Location, "'alias' expected");
} else if (RootContext.Version == LanguageVersion.ISO_1) {
- Report.FeatureIsNotStandardized (lt.Location, "external alias");
+ Report.FeatureIsNotISO1 (lt.Location, "external alias");
} else {
lt = (LocatedToken) $3;
current_namespace.UsingExternalAlias (lt.Value, lt.Location);
@@ -2763,13 +2765,13 @@
{
$$ = $2;
if (RootContext.Version == LanguageVersion.ISO_1)
- Report.FeatureIsNotStandardized (lexer.Location, "generics");
+ Report.FeatureIsNotISO1 (lexer.Location, "generics");
}
| GENERIC_DIMENSION
{
$$ = new TypeArguments ((int) $1, lexer.Location);
if (RootContext.Version == LanguageVersion.ISO_1)
- Report.FeatureIsNotStandardized (lexer.Location, "generics");
+ Report.FeatureIsNotISO1 (lexer.Location, "generics");
}
;

@@ -3457,7 +3459,7 @@
current_block = (Block) oob_stack.Pop ();

if (RootContext.Version == LanguageVersion.ISO_1){
- Report.FeatureIsNotStandardized (loc, "anonymous methods");
+ Report.FeatureIsNotISO1 (loc, "anonymous methods");
$$ = null;
} else {
ToplevelBlock anon_block = (ToplevelBlock) $4;
@@ -3861,9 +3863,84 @@
}
;

+implicitly_typed_lambda_parameter_list
+ : IDENTIFIER {
+ ArrayList a = new ArrayList (4);
+ a.Add ($1);
+ $$ = a;
+ }
+ | implicitly_typed_lambda_parameter_list COMMA IDENTIFIER {
+ ArrayList a = (ArrayList) $1;
+ a.Add ($3);
+ $$ = a;
+ }
+ ;
+
+explicitly_typed_lambda_parameter_list
+ : explicitly_typed_lambda_parameter
+ {
+ ArrayList pars = new ArrayList (4);
+ pars.Add ($1);
+
+ $$ = pars;
+ }
+ | explicitly_typed_lambda_parameter_list COMMA explicitly_typed_lambda_parameter
+ {
+ ArrayList pars = (ArrayList) $1;
+ pars.Add ($3);
+
+ $$ = pars;
+ }
+ ;
+
+explicitly_typed_lambda_parameter
+ : parameter_modifier type IDENTIFIER
+ {
+ LocatedToken lt = (LocatedToken) $3;
+
+ $$ = new Parameter ((Expression) $2, lt.Value, (Parameter.Modifier) $1, null, lt.Location);
+ }
+ | type IDENTIFIER
+ {
+ LocatedToken lt = (LocatedToken) $3;
+
+ $$ = new Parameter ((Expression) $2, lt.Value, Parameter.Modifier.NONE, null, lt.Location);
+ }
+ ;
+
+lambda_parameter_list
+ : implicitly_typed_lambda_parameter_list { $$ = $1; }
+ | explicitly_typed_lambda_parameter_list { $$ = $1; }
+ ;
+
+lambda_expression_body
+ : expression
+ | block
+ ;
+
+lambda_expression
+ : IDENTIFIER ARROW lambda_expression_body
+ {
+ LocatedToken lt = (LocatedToken) $2;
+ ArrayList a = new ArrayList (1);
+ a.Add ($1);
+ $$ = new LambdaExpression (a, $3, lt.Location);
+ }
+ | OPEN_PARENS CLOSE_PARENS ARROW lambda_expression_body {
+ LocatedToken lt = (LocatedToken) $3;
+ $$ = new LambdaExpression (new ArrayList (), $3, lt.Location);
+ }
+ | OPEN_PARENS_LAMBDA lambda_parameter_list CLOSE_PARENS ARROW lambda_expression_body
+ {
+ LocatedToken lt = (LocatedToken) $4;
+ $$ = new LambdaExpression ((ArrayList) $2, $5, lt.Location);
+ }
+ ;
+
expression
: conditional_expression
| assignment_expression
+ | lambda_expression
;

constant_expression
@@ -4682,7 +4759,7 @@
$$ = null;
}
if (RootContext.Version == LanguageVersion.ISO_1){
- Report.FeatureIsNotStandardized (lt.Location, "yield statement");
+ Report.FeatureIsNotISO1 (lt.Location, "yield statement");
$$ = null;
}
if (anonymous_host == null){
@@ -4707,7 +4784,7 @@
$$ = null;
}
if (RootContext.Version == LanguageVersion.ISO_1){
- Report.FeatureIsNotStandardized (lt.Location, "yield statement");
+ Report.FeatureIsNotISO1 (lt.Location, "yield statement");
$$ = null;
}
if (anonymous_host == null){

Modified: trunk/mcs/gmcs/gmcs.exe.sources
===================================================================
--- trunk/mcs/gmcs/gmcs.exe.sources 2007-01-28 21:24:54 UTC (rev 71837)
+++ trunk/mcs/gmcs/gmcs.exe.sources 2007-01-28 21:46:10 UTC (rev 71838)
@@ -19,6 +19,7 @@
../mcs/flowanalysis.cs
generic.cs
../mcs/iterators.cs
+../mcs/lambda.cs
../mcs/literal.cs
../mcs/location.cs
../mcs/modifiers.cs

Modified: trunk/mcs/mcs/ChangeLog
===================================================================
--- trunk/mcs/mcs/ChangeLog 2007-01-28 21:24:54 UTC (rev 71837)
+++ trunk/mcs/mcs/ChangeLog 2007-01-28 21:46:10 UTC (rev 71838)
@@ -1,3 +1,34 @@
+2007-01-28 Miguel de Icaza <mig...@novell.com>
+
+ * lambda.cs: Empty new file, will eventually have the lambda
+ expression implementation.
+
+ * lambda.test: used to test the internal tokenizer.
+
+ * report.cs (FeatureIsNotISO1): Rename from
+ FeatureIsNotStandardized, because it was about the language level
+ (1 vs 2), not about standardization.
+
+ (FeatureRequiresLINQ): New.
+
+ * support.cs (SeekableStreamReader): Only require that the reader
+ is a TextReader, not a StreamReader, so we can plug StringReader.
+
+ * cs-tokenizer.cs (parse_type_and_parameter): Returns true if at a
+ given position in the input stream the following tokens can be
+ parsed as a type followed by an identifier.
+
+ (is_punct): after a '(' if parse_type_and_parameter returns true,
+ then return a special token OPEN_PARENS_LAMBDA which is used to
+ avoid reduce/reduce errors in the grammar for the
+ lambda_expression rules.
+
+ (parse_type): implement a type parser inside the
+ tokenizer, the parser only returns true or false depending on
+ whether the input at a given position can be parsed as a type.
+
+ (peek_token): new method used during type parsing.
+
2007-01-28 Raja R Harinath <rhar...@novell.com>

Fix #80531

Modified: trunk/mcs/mcs/class.cs
===================================================================
--- trunk/mcs/mcs/class.cs 2007-01-28 21:24:54 UTC (rev 71837)
+++ trunk/mcs/mcs/class.cs 2007-01-28 21:46:10 UTC (rev 71838)
@@ -2885,7 +2885,7 @@
this.ModFlags = Modifiers.Check (AllowedModifiers, mod, accmods, Location);

if (IsStatic && RootContext.Version == LanguageVersion.ISO_1) {
- Report.FeatureIsNotStandardized (Location, "static classes");
+ Report.FeatureIsNotISO1 (Location, "static classes");
}
}

@@ -5837,7 +5837,7 @@
base (parent, type, mod, AllowedModifiers, new MemberName (name, loc), attrs)
{
if (RootContext.Version == LanguageVersion.ISO_1)
- Report.FeatureIsNotStandardized (loc, "fixed size buffers");
+ Report.FeatureIsNotISO1 (loc, "fixed size buffers");

this.size_expr = size_expr;
}
@@ -6458,7 +6458,7 @@
anonymous_methods = accessor.AnonymousMethods;

if (accessor.ModFlags != 0 && RootContext.Version == LanguageVersion.ISO_1) {
- Report.FeatureIsNotStandardized (Location, "access modifiers on properties");
+ Report.FeatureIsNotISO1 (Location, "access modifiers on properties");
}
}

Modified: trunk/mcs/mcs/cs-parser.jay
===================================================================
--- trunk/mcs/mcs/cs-parser.jay 2007-01-28 21:24:54 UTC (rev 71837)
+++ trunk/mcs/mcs/cs-parser.jay 2007-01-28 21:46:10 UTC (rev 71838)
@@ -202,6 +202,7 @@
%token WHILE
%token ARGLIST
%token PARTIAL
+%token ARROW

/* C# keywords which are not really keywords */
%token GET "get"
@@ -227,7 +228,9 @@
%token BANG "!"
%token ASSIGN "="
%token OP_LT "<"
+%token OP_GENERICS_LT "<"
%token OP_GT ">"
+%token OP_GENERICS_GT ">"
%token BITWISE_AND "&"
%token BITWISE_OR "|"
%token STAR "*"
@@ -269,6 +272,7 @@
%token LITERAL_STRING "string literal"

%token IDENTIFIER
+%token OPEN_PARENS_LAMBDA
%token CLOSE_PARENS_CAST
%token CLOSE_PARENS_NO_CAST
%token CLOSE_PARENS_OPEN_PARENS
@@ -337,7 +341,7 @@
if (s != "alias"){
Report.Error (1003, lt.Location, "'alias' expected");
} else if (RootContext.Version == LanguageVersion.ISO_1) {
- Report.FeatureIsNotStandardized (lt.Location, "external alias");
+ Report.FeatureIsNotISO1 (lt.Location, "external alias");
} else {
lt = (LocatedToken) $3;
current_namespace.UsingExternalAlias (lt.Value, lt.Location);
@@ -3197,7 +3201,7 @@
current_block = (Block) oob_stack.Pop ();

if (RootContext.Version == LanguageVersion.ISO_1){
- Report.FeatureIsNotStandardized (loc, "anonymous methods");
+ Report.FeatureIsNotISO1 (loc, "anonymous methods");
$$ = null;
} else {
ToplevelBlock anon_block = (ToplevelBlock) $4;
@@ -4299,7 +4303,7 @@
$$ = null;
}
if (RootContext.Version == LanguageVersion.ISO_1){
- Report.FeatureIsNotStandardized (lt.Location, "yield statement");
+ Report.FeatureIsNotISO1 (lt.Location, "yield statement");
$$ = null;
}
if (anonymous_host == null){
@@ -4324,7 +4328,7 @@
$$ = null;
}
if (RootContext.Version == LanguageVersion.ISO_1){
- Report.FeatureIsNotStandardized (lt.Location, "yield statement");
+ Report.FeatureIsNotISO1 (lt.Location, "yield statement");
$$ = null;
}
if (anonymous_host == null){

Modified: trunk/mcs/mcs/cs-tokenizer.cs
===================================================================
--- trunk/mcs/mcs/cs-tokenizer.cs 2007-01-28 21:24:54 UTC (rev 71837)
+++ trunk/mcs/mcs/cs-tokenizer.cs 2007-01-28 21:46:10 UTC (rev 71838)
@@ -10,13 +10,8 @@
// (C) 2001, 2002 Ximian, Inc (http://www.ximian.com)
// (C) 2004 Novell, Inc
//
+//

-/*
- * TODO:
- * Make sure we accept the proper Unicode ranges, per the spec.
- * Report error 1032
-*/
-
using System;
using System.Text;
using System.Collections;
@@ -45,6 +40,7 @@
bool handle_assembly = false;
bool handle_constraints = false;
bool handle_typeof = false;
+ bool linq;
Location current_location;
Location current_comment_location = Location.Null;
ArrayList escapedIdentifiers = new ArrayList ();
@@ -472,6 +468,7 @@
{
this.ref_name = file;
this.file_name = file;
+ linq = RootContext.Version == LanguageVersion.LINQ;
reader = input;

putback_char = -1;
@@ -628,6 +625,164 @@
nullable_pos = -1;
}
#endif
+
+ int peek_token ()
+ {
+ int the_token;
+
+ PushPosition ();
+ the_token = token ();
+ PopPosition ();
+ return the_token;
+ }
+
+ bool parse_opt_type_arguments ()
+ {
+ int next = peek_token ();
+ if (next == Token.OP_GENERICS_LT){
+ while (true) {
+ token ();
+ if (!parse_namespace_or_typename (-1))
+ return false;
+ next = peek_token ();
+ if (next == Token.COMMA)
+ continue;
+ if (next == Token.OP_GENERICS_GT || next == Token.OP_GT){
+ token ();
+ return true;
+ }
+ return false;
+ }
+ }
+ if (next == Token.OP_GT || next == Token.OP_GENERICS_GT){
+ token ();
+ return true;
+ }
+ return true;
+ }
+
+ bool parse_namespace_or_typename (int next)
+ {
+ again:
+ if (next == -1)
+ next = peek_token ();
+ if (next == Token.IDENTIFIER){
+ token ();
+ next = peek_token ();
+ if (next == Token.DOT || next == Token.DOUBLE_COLON){
+ token ();
+ next = peek_token ();
+ goto again;
+ }
+ if (next == Token.OP_GENERICS_LT || next == Token.OP_LT){
+ token ();
+ if (parse_opt_type_arguments ())
+ return true;
+ }
+ return true;
+ }
+
+ return false;
+ }
+
+ bool is_simple_type (int token)
+ {
+ return (token == Token.BOOL ||
+ token == Token.DECIMAL ||
+ token == Token.SBYTE ||
+ token == Token.BYTE ||
+ token == Token.SHORT ||
+ token == Token.USHORT ||
+ token == Token.INT ||
+ token == Token.UINT ||
+ token == Token.LONG ||
+ token == Token.ULONG ||
+ token == Token.CHAR ||
+ token == Token.FLOAT ||
+ token == Token.DOUBLE);
+ }
+
+ bool is_builtin_reference_type (int token)
+ {
+ return (token == Token.OBJECT || token == Token.STRING);
+ }
+
+ bool parse_opt_rank (int next)
+ {
+ while (true){
+ if (next != Token.OPEN_BRACKET)
+ return true;
+
+ token ();
+ while (true){
+ next = token ();
+ if (next == Token.CLOSE_BRACKET){
+ next = peek_token ();
+ break;
+ }
+ if (next == Token.COMMA)
+ continue;
+
+ return false;
+ }
+ }
+ }
+
+ bool parse_type ()
+ {
+ int next = peek_token ();
+
+ if (is_simple_type (next)){
+ token ();
+ next = peek_token ();
+ if (next == Token.INTERR)
+ token ();
+ return parse_opt_rank (peek_token ());
+ }
+ if (parse_namespace_or_typename (next)){
+ next = peek_token ();
+ if (next == Token.INTERR)
+ token ();
+ return parse_opt_rank (peek_token ());
+ } else if (is_builtin_reference_type (next)){
+ token ();
+ return parse_opt_rank (peek_token ());
+ }
+
+ return false;
+ }
+
+ //
+ // Invoked after '(' has been seen and tries to parse
+ // a type expression followed by an identifier, if this
+ // is the case, instead of returning an OPEN_PARENS token
+ // we return a special token that triggers lambda parsing.
+ //
+ // This is needed because we can not introduce the
+ // explicitly_typed_lambda_parameter_list after a '(' in the
+ // grammar without introducing reduce/reduce conflicts.
+ //
+ // We need to parse a type and if it is followed by an
+ // identifier, we know it has to be parsed as a lambda
+ // expression.
+ //
+ // the type expression can be prefixed with `ref' or `out'
+ //
+ public bool parse_type_and_parameter ()
+ {
+ int next = peek_token ();
+
+ if (next == Token.REF || next == Token.OUT)
+ token ();
+
+ if (parse_type ()){
+ next = peek_token ();
+ if (next == Token.IDENTIFIER)
+ return true;
+ }
+ return false;
+ }
+
int is_punct (char c, ref bool doread)
{
int d;
@@ -650,7 +805,17 @@
case ']':
return Token.CLOSE_BRACKET;
case '(':
- return Token.OPEN_PARENS;
+ if (linq){
+ PushPosition ();
+ bool is_type_and_parameter = parse_type_and_parameter ();
+ PopPosition ();
+
+ if (is_type_and_parameter)
+ return Token.OPEN_PARENS_LAMBDA;
+ else
+ return Token.OPEN_PARENS;
+ } else
+ return Token.OPEN_PARENS;
case ')': {
if (deambiguate_close_parens == 0)
return Token.CLOSE_PARENS;
@@ -796,6 +961,10 @@
doread = true;
return Token.OP_EQ;
}
+ if (d == '>'){
+ doread = true;
+ return Token.ARROW;
+ }
return Token.ASSIGN;
}

@@ -2061,7 +2230,7 @@

case "pragma":
if (RootContext.Version == LanguageVersion.ISO_1) {
- Report.FeatureIsNotStandardized (Location, "#pragma");
+ Report.FeatureIsNotISO1 (Location, "#pragma");
return true;
}

Modified: trunk/mcs/mcs/driver.cs
===================================================================
--- trunk/mcs/mcs/driver.cs 2007-01-28 21:24:54 UTC (rev 71837)
+++ trunk/mcs/mcs/driver.cs 2007-01-28 21:46:10 UTC (rev 71838)
@@ -211,6 +211,7 @@
"Other flags in the compiler\n" +
" --fatal Makes errors fatal\n" +
" --parse Only parses the source file\n" +
+ " --typetest Tests the tokenizer's built-in type parser\n" +
" --stacktrace Shows stack trace at error location\n" +
" --timestamp Displays time stamps of various compiler events\n" +
" --expect-error X Expect that error X will be encountered\n" +
@@ -701,6 +702,39 @@
Console.WriteLine ("Mono C# compiler version {0}", version);
Environment.Exit (0);
}
+
+ //
+ // This is to test the tokenizer internal parser that is used to deambiguate
+ // '(' type identifier from '(' type others so that we are able to parse
+ // without introducing reduce/reduce conflicts in the grammar.
+ //
+ static void LambdaTypeParseTest (string fname)
+ {
+ bool fail = false;
+
+ using (FileStream fs = File.OpenRead (fname)){
+ StreamReader r = new StreamReader (fs, encoding);
+ string line;
+
+ while ((line = r.ReadLine ())!= null){
+ if (line [0] == '!')
+ continue;
+ bool must_pass = line [0] == '+';
+ StringReader test = new StringReader (line.Substring (1));
+ SeekableStreamReader reader = new SeekableStreamReader (test);
+ SourceFile file = new SourceFile (fname, fname, 0);
+
+ Tokenizer lexer = new Tokenizer (reader, file, defines);
+ bool res = lexer.parse_type_and_parameter ();
+
+ Console.WriteLine ("{3} ({1}=={2}): {0}", line.Substring (1), res, must_pass, res == must_pass);
+ if (res != must_pass)
+ fail = true;
+ }
+ }
+ Console.WriteLine ("fail={0}", fail);
+ Environment.Exit (fail ? 1 : 0);
+ }

//
// Currently handles the Unix-like command line options, but will be
@@ -967,6 +1001,14 @@
Report.Warning (-29, 1, "Compatibility: Use -noconfig option instead of --noconfig");
load_default_config = false;
return true;
+
+ case "--typetest":
+ if ((i + 1) >= args.Length){
+ Report.Error (5, "--typetest requires a filename argument");
+ Environment.Exit (1);
+ }
+ LambdaTypeParseTest (args [++i]);
+ return true;
}

return false;

Added: trunk/mcs/mcs/lambda.test
===================================================================
--- trunk/mcs/mcs/lambda.test 2007-01-28 21:24:54 UTC (rev 71837)
+++ trunk/mcs/mcs/lambda.test 2007-01-28 21:46:10 UTC (rev 71838)
@@ -0,0 +1,34 @@
+!
+! This file contains tests for the tokenizer to recognize a type followed
+! by an identifier. This is used for the internal deambiguation
+! used for lambda_expressions
+!
++identifier::identifier<> name
++identifier::identifier<arg::two,arg<>,blah::blah<blah>> name
++file?[,] name
++file?[,,,] name
+-dingus<<int>? name
+-dingus?<int> name
++identifier name
++identifier.dot.identifier name
++identifier<typeparm>.name name
++identifier<typeparm> name
++identifier<identifier<identifier,two,three>,two>.identifier<two,three> name
++identifier::identifier name
++identifier::identifier<arg> name
++identifier::identifier<arg::one> name
++identifier::identifier<arg::one,arg::two> name
++identifier::identifier<arg,arg::two> name
++identifier::identifier<arg::two,arg> name
++identifier::identifier<arg::two,arg<second,third>> name
++identifier<arg::one> name
++file[] name
++file[][][][] name
++file[,,,,][,][][,] name
++file[][,,,] name
++dingus<type[]> name
++dingus<type[,,,]> name
++dingus<type[,,,][]> name
++dingus<type[,,,][,]> name
++dingus<type[,,,][,]> name
++file file

Modified: trunk/mcs/mcs/report.cs
===================================================================
--- trunk/mcs/mcs/report.cs 2007-01-28 21:24:54 UTC (rev 71837)
+++ trunk/mcs/mcs/report.cs 2007-01-28 21:46:10 UTC (rev 71838)
@@ -236,10 +236,18 @@

}

- public static void FeatureIsNotStandardized (Location loc, string feature)
+ public static void FeatureIsNotISO1 (Location loc, string feature)
{
- Report.Error (1644, loc, "Feature `{0}' cannot be used because it is not part of the standardized ISO C# language specification", feature);
+ Report.Error (1644, loc,
+ "Feature `{0}' cannot be used because it is not part of the C# 1.0 language specification",
+ feature);
}
+
+ public static void FeatureRequiresLINQ (Location loc, string feature)
+ {
+ Report.Error (1644, loc,
+ "Feature `{0}' can only be used if the language level is LINQ", feature);
+ }

public static string FriendlyStackTrace (Exception e)
{

Modified: trunk/mcs/mcs/support.cs
===================================================================
--- trunk/mcs/mcs/support.cs 2007-01-28 21:24:54 UTC (rev 71837)
+++ trunk/mcs/mcs/support.cs 2007-01-28 21:46:10 UTC (rev 71838)
@@ -361,7 +361,7 @@
/// </summary>
public class SeekableStreamReader
{
- public SeekableStreamReader (StreamReader reader)
+ public SeekableStreamReader (TextReader reader)
{
this.reader = reader;
this.buffer = new char [AverageReadLength * 3];
@@ -374,7 +374,7 @@
: this (new StreamReader (stream, encoding, true))
{ }

- StreamReader reader;
+ TextReader reader;

private const int AverageReadLength = 1024;

Reply all
Reply to author
Forward
0 new messages