diff --git a/.gitignore b/.gitignore index bddaa6e..3221e81 100644 --- a/.gitignore +++ b/.gitignore @@ -40,4 +40,6 @@ coverage.xml site/ # ignore log files -temp/ \ No newline at end of file +temp/ + +pfdl_scheduler/plugins/parser/** \ No newline at end of file diff --git a/pfdl_grammar/PFDLLexer.g4 b/pfdl_grammar/PFDLLexer.g4 index 086328c..d4a1e47 100644 --- a/pfdl_grammar/PFDLLexer.g4 +++ b/pfdl_grammar/PFDLLexer.g4 @@ -29,6 +29,10 @@ def nextToken(self): return self.denter.next_token() } +// The comment below allows the PFDL plugin system to insert the lexer rules at this point +// This ensures that newly added rules are not skipped for rules below like 'STARTS_WITH_LOWER_C_STR' +// {Plugin_Insertion_Point} + // Main grammar STRUCT: 'Struct'; TASK: 'Task'; @@ -41,7 +45,6 @@ PARALLEL: 'Parallel'; CONDITION: 'Condition'; PASSED: 'Passed'; FAILED: 'Failed'; -ON_DONE: 'OnDone'; END: 'End'; NUMBER_P: 'number'; STRING_P: 'string'; diff --git a/pfdl_grammar/PFDLParser.g4 b/pfdl_grammar/PFDLParser.g4 index a8c5dac..754210d 100644 --- a/pfdl_grammar/PFDLParser.g4 +++ b/pfdl_grammar/PFDLParser.g4 @@ -5,13 +5,25 @@ options { } program: - (NL | struct | task)* EOF; + program_statement* EOF; + +program_statement: + NL | struct | task | instance; struct: - STRUCT STARTS_WITH_UPPER_C_STR INDENT (variable_definition NL+)+ DEDENT END; + STRUCT STARTS_WITH_UPPER_C_STR (COLON struct_id)? INDENT ( + variable_definition NL+ + )+ DEDENT END; + +struct_id: STARTS_WITH_UPPER_C_STR; task: - TASK STARTS_WITH_LOWER_C_STR INDENT task_in? statement+ task_out? DEDENT END; + TASK STARTS_WITH_LOWER_C_STR INDENT task_in? taskStatement+ task_out? 
DEDENT END; + +instance: + struct_id STARTS_WITH_LOWER_C_STR INDENT ( + attribute_assignment NL + )+ DEDENT END; task_in: IN INDENT (variable_definition NL+)+ DEDENT; @@ -19,6 +31,9 @@ task_in: task_out: OUT INDENT (STARTS_WITH_LOWER_C_STR NL+)+ DEDENT; +taskStatement: + statement; + statement: service_call | task_call @@ -80,6 +95,9 @@ primitive: attribute_access: STARTS_WITH_LOWER_C_STR (DOT STARTS_WITH_LOWER_C_STR array?)+; +attribute_assignment: + STARTS_WITH_LOWER_C_STR COLON (value | json_object); + array: ARRAY_LEFT (INTEGER | STARTS_WITH_LOWER_C_STR)? ARRAY_RIGHT; diff --git a/pfdl_scheduler/model/array.py b/pfdl_scheduler/model/array.py index def6538..1f21557 100644 --- a/pfdl_scheduler/model/array.py +++ b/pfdl_scheduler/model/array.py @@ -64,7 +64,7 @@ def __radd__(self, other) -> str: return other + str(self) def __eq__(self, __o: object) -> bool: - if isinstance(__o, Array): + if hasattr(__o, "values") and hasattr(__o, "length") and hasattr(__o, "type_of_elements"): return ( self.values == __o.values and self.length == __o.length diff --git a/pfdl_scheduler/model/instance.py b/pfdl_scheduler/model/instance.py new file mode 100644 index 0000000..bc7d4da --- /dev/null +++ b/pfdl_scheduler/model/instance.py @@ -0,0 +1,118 @@ +# Copyright The PFDL Contributors +# +# Licensed under the MIT License. +# For details on the licensing terms, see the LICENSE file. +# SPDX-License-Identifier: MIT + +"""Contains Instance class.""" + +# standard libraries +import copy +from numbers import Number +from typing import Dict, Union + +# 3rd party libs +from antlr4.ParserRuleContext import ParserRuleContext + +# local sources +## PFDL base sources +from pfdl_scheduler.model.array import Array +from pfdl_scheduler.pfdl_base_classes import PFDLBaseClasses +from pfdl_scheduler.validation.error_handler import ErrorHandler + + +class Instance: + """Represents an Instance in the PFDL. + + Attributes: + name: A string representing the name of the Instance. 
+ attributes: A dict mapping the attribute names with their values. + struct_name: A string refering to the Struct this Instance instanciates. + context: ANTLR context object of this class. + attribute_contexts: A dict that maps the attribute names to their ANTLR contexts. + """ + + def __init__( + self, + name: str = "", + attributes: Dict[str, Union[str, Number, bool, "Instance"]] = None, + struct_name: str = "", + context: ParserRuleContext = None, + ) -> None: + """Initialize the object. + + Args: + name: A string representing the name of the Instance. + attributes: A dict mapping the attribute names with their values. + struct_name: A string refering to the Struct this Instance instanciates. + context: ANTLR context object of this class. + """ + self.name: str = name + + if attributes: + self.attributes: Dict[str, Union[str, Number, bool, "Instance"]] = attributes + else: + self.attributes: Dict[str, Union[str, Number, bool, "Instance"]] = {} + + self.struct_name: str = struct_name + self.context: ParserRuleContext = context + self.attribute_contexts: Dict = {} + + def __deepcopy__(self, memo): + cls = self.__class__ + result = cls.__new__(cls) + memo[id(self)] = result + for attr, value in self.__dict__.items(): + try: + setattr(result, attr, copy.deepcopy(value, memo)) + except Exception: + setattr(result, attr, value) + return result + + @classmethod + def from_json( + cls, + json_object: Dict, + error_handler: ErrorHandler, + struct_context: ParserRuleContext, + pfdl_base_classes=PFDLBaseClasses, + ): + return parse_json(json_object, error_handler, struct_context, pfdl_base_classes) + + +def parse_json( + json_object: Dict, + error_handler: ErrorHandler, + instance_context: ParserRuleContext, + pfdl_base_classes=PFDLBaseClasses, +) -> Instance: + """Parses the JSON Struct initialization. + + Returns: + An Instance object representing the initialized instance. 
+ """ + instance = pfdl_base_classes.get_class("Instance")() + instance.context = instance_context + for identifier, value in json_object.items(): + if isinstance(value, (int, str, bool)): + instance.attributes[identifier] = value + elif isinstance(value, list): + array = pfdl_base_classes.get_class("Array")() + instance.attributes[identifier] = array + for element in value: + if isinstance(element, (int, float, str, bool)): + if isinstance(element, bool): + array.type_of_elements = "boolean" + elif isinstance(element, (int, float)): + array.type_of_elements = "number" + else: + array.type_of_elements = "string" + array.append_value(element) + elif isinstance(element, dict): + inner_struct = parse_json(element, error_handler) + array.append_value(inner_struct) + elif isinstance(value, dict): + inner_struct = parse_json(value, error_handler, instance_context, pfdl_base_classes) + instance.attributes[identifier] = inner_struct + + return instance diff --git a/pfdl_scheduler/model/process.py b/pfdl_scheduler/model/process.py index c9038fc..b65cc06 100644 --- a/pfdl_scheduler/model/process.py +++ b/pfdl_scheduler/model/process.py @@ -11,6 +11,7 @@ from typing import Dict # local sources +from pfdl_scheduler.model.instance import Instance from pfdl_scheduler.model.struct import Struct from pfdl_scheduler.model.task import Task @@ -25,6 +26,7 @@ class Process: Attributes: structs: A dict for mapping the Struct names to the Struct objects. task: A dict for mapping the Task names to the Task objects. + instances: A dict for mappign the Instance names to the Instance objects. start_task_name: the name of the start task of the PFDL program (typically "productionTask"). """ @@ -32,6 +34,7 @@ def __init__( self, structs: Dict[str, Struct] = None, tasks: Dict[str, Task] = None, + instances: Dict[str, Instance] = None, start_task_name: str = "productionTask", ) -> None: """Initialize the object. 
@@ -39,6 +42,7 @@ def __init__( Args: structs: A dict for mapping the Struct names to the Struct objects. tasks: A dict for mapping the Task names to the Task objects. + instances: A dict for mappign the Instance names to the Instance objects. start_task_name: the name of the start task of the PFDL program (typically "productionTask"). """ if structs: @@ -49,4 +53,8 @@ def __init__( self.tasks: Dict[str, Task] = tasks else: self.tasks: Dict[str, Task] = {} + if instances: + self.instances: Dict[str, Task] = instances + else: + self.instances: Dict[str, Task] = {} self.start_task_name = start_task_name diff --git a/pfdl_scheduler/model/service.py b/pfdl_scheduler/model/service.py index 253f4ed..9de00f7 100644 --- a/pfdl_scheduler/model/service.py +++ b/pfdl_scheduler/model/service.py @@ -14,11 +14,11 @@ from antlr4.ParserRuleContext import ParserRuleContext # local sources +from pfdl_scheduler.model.instance import Instance from pfdl_scheduler.model.struct import Struct from pfdl_scheduler.model.array import Array -@dataclass class Service: """Represents a Service or Service Call in the PFDL. 
@@ -36,7 +36,7 @@ class Service: def __init__( self, name: str = "", - input_parameters: List[Union[str, List[str], Struct]] = None, + input_parameters: List[Union[str, List[str], Instance]] = None, output_parameters: Dict[str, Union[str, Array]] = None, context: ParserRuleContext = None, ) -> None: @@ -51,9 +51,9 @@ def __init__( self.name: str = name if input_parameters: - self.input_parameters: List[Union[str, List[str], Struct]] = input_parameters + self.input_parameters: List[Union[str, List[str], Instance]] = input_parameters else: - self.input_parameters: List[Union[str, List[str], Struct]] = [] + self.input_parameters: List[Union[str, List[str], Instance]] = [] if output_parameters: self.output_parameters: OrderedDict[str, Union[str, Array]] = output_parameters diff --git a/pfdl_scheduler/model/struct.py b/pfdl_scheduler/model/struct.py index 820db3d..ffe12f9 100644 --- a/pfdl_scheduler/model/struct.py +++ b/pfdl_scheduler/model/struct.py @@ -9,7 +9,7 @@ # standard libraries import copy from dataclasses import dataclass -from typing import Dict, Union +from typing import Dict, Type, Union import json # 3rd party libraries @@ -31,6 +31,8 @@ class Struct: name: A string representing the name of the Struct. attributes: A dict which maps the attribute names to the defined type or a value (if its a instantiated struct). + parent_struct_name: A string representin the identifier of the parent struct + from which this struct inherits the attributes context: ANTLR context object of this class. context_dict: Maps other attributes with ANTLR context objects. """ @@ -39,6 +41,7 @@ def __init__( self, name: str = "", attributes: Dict[str, Union[str, Array, "Struct"]] = None, + parent_struct_name: str = "", context: ParserRuleContext = None, ) -> None: """Initialize the object. @@ -47,6 +50,8 @@ def __init__( name: A string representing the name of the Struct. attributes: A dict which maps the attribute names to the defined type or a value (if its a instantiated struct). 
+ parent_struct_name: A string representin the identifier of the parent struct + from which this struct inherits the attributes context: ANTLR context object of this class. """ self.name: str = name @@ -54,11 +59,17 @@ def __init__( self.attributes: Dict[str, Union[str, Array, "Struct"]] = attributes else: self.attributes: Dict[str, Union[str, Array, "Struct"]] = {} + self.parent_struct_name: str = parent_struct_name self.context: ParserRuleContext = context self.context_dict: Dict = {} def __eq__(self, __o: object) -> bool: - if isinstance(__o, Struct): + if ( + hasattr(__o, "name") + and hasattr(__o, "attributes") + and hasattr(__o, "context") + and hasattr(__o, "context_dict") + ): return ( self.name == __o.name and self.attributes == __o.attributes @@ -80,7 +91,11 @@ def __deepcopy__(self, memo): @classmethod def from_json( - cls, json_string: str, error_handler: ErrorHandler, struct_context: ParserRuleContext + cls, + json_string: str, + error_handler: ErrorHandler, + struct_context: ParserRuleContext, + struct_class: Type, ) -> "Struct": """Creates a Struct instance out of the given JSON string. @@ -92,12 +107,15 @@ def from_json( The Struct which was created from the JSON string. """ json_object = json.loads(json_string) - struct = parse_json(json_object, error_handler, struct_context) + struct = parse_json(json_object, error_handler, struct_context, struct_class) return struct def parse_json( - json_object: Dict, error_handler: ErrorHandler, struct_context: ParserRuleContext + json_object: Dict, + error_handler: ErrorHandler, + struct_context: ParserRuleContext, + struct_class: Type, ) -> Struct: """Parses the JSON Struct initialization. @@ -105,11 +123,12 @@ def parse_json( json_object: A JSON object describing the Struct. error_handler: An ErrorHandler instance used for printing errors. struct_context: The ANTLR struct context the struct corresponds to. + struct_class: The class of the struct from which the struct object is created. 
Returns: A Struct object representing the initialized Struct. """ - struct = Struct() + struct = struct_class() struct.context = struct_context for identifier, value in json_object.items(): @@ -129,9 +148,9 @@ def parse_json( array.type_of_elements = "string" array.append_value(element) elif isinstance(element, dict): - inner_struct = parse_json(element, error_handler, struct_context) + inner_struct = parse_json(element, error_handler, struct_context, struct_class) array.append_value(inner_struct) elif isinstance(value, dict): - inner_struct = parse_json(value, error_handler, struct_context) + inner_struct = parse_json(value, error_handler, struct_context, struct_class) struct.attributes[identifier] = inner_struct return struct diff --git a/pfdl_scheduler/parser/PFDLLexer.py b/pfdl_scheduler/parser/PFDLLexer.py index 896d888..0cf1709 100644 --- a/pfdl_scheduler/parser/PFDLLexer.py +++ b/pfdl_scheduler/parser/PFDLLexer.py @@ -1,7 +1,8 @@ -# Generated from PFDLLexer.g4 by ANTLR 4.9.3 +# Generated from temp/PFDLLexer.g4 by ANTLR 4.9.3 from antlr4 import * from io import StringIO import sys + if sys.version_info[1] > 5: from typing import TextIO else: @@ -12,201 +13,287 @@ from pfdl_scheduler.parser.PFDLParser import PFDLParser - def serializedATN(): with StringIO() as buf: - buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2A") - buf.write("\u01b7\b\1\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6") + buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2U") + buf.write("\u028a\b\1\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6") buf.write("\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r") buf.write("\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22") buf.write("\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30") buf.write("\t\30\4\31\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35") - buf.write("\4\36\t\36\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\4") - buf.write("%\t%\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4,\t,\4-\t") + 
buf.write('\4\36\t\36\4\37\t\37\4 \t \4!\t!\4"\t"\4#\t#\4$\t$\4') + buf.write("%\t%\4&\t&\4'\t'\4(\t(\4)\t)\4*\t*\4+\t+\4,\t,\4-\t") buf.write("-\4.\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63") buf.write("\4\64\t\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4") - buf.write(":\t:\4;\t;\4<\t<\4=\t=\4>\t>\4?\t?\4@\t@\3\2\3\2\3\2\3") - buf.write("\2\3\2\3\2\3\2\3\3\3\3\3\3\3\3\3\3\3\4\3\4\3\4\3\5\3\5") - buf.write("\3\5\3\5\3\6\3\6\3\6\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3\7\3") - buf.write("\b\3\b\3\b\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\n\3\n") - buf.write("\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\13\3\13\3\13\3\13\3") - buf.write("\13\3\13\3\13\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\r\3\r\3\r") - buf.write("\3\r\3\r\3\r\3\r\3\16\3\16\3\16\3\16\3\17\3\17\3\17\3") - buf.write("\17\3\17\3\17\3\17\3\20\3\20\3\20\3\20\3\20\3\20\3\20") + buf.write(":\t:\4;\t;\4<\t<\4=\t=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4") + buf.write("C\tC\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4I\tI\4J\tJ\4K\tK\4") + buf.write("L\tL\4M\tM\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\tT\3") + buf.write("\2\3\2\3\2\3\2\3\2\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\4\3\4") + buf.write("\3\4\3\4\3\4\3\4\3\4\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3") + buf.write("\5\3\5\3\6\3\6\3\6\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3\7\3\7") + buf.write("\3\b\3\b\3\b\3\b\3\b\3\t\3\t\3\t\3\n\3\n\3\n\3\n\3\n\3") + buf.write("\n\3\n\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\f\3\f\3\f") + buf.write("\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3") + buf.write("\f\3\f\3\f\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r") + buf.write("\3\r\3\r\3\r\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3") + buf.write("\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\17\3\17\3\17") + buf.write("\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\20\3\20") + buf.write("\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\21\3\21") buf.write("\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\22\3\22\3\22") - buf.write("\3\22\3\22\3\23\3\23\3\23\3\23\3\23\3\23\3\24\3\24\3\25") - 
buf.write("\3\25\3\26\3\26\3\27\3\27\3\27\3\27\3\30\3\30\3\31\3\31") - buf.write("\3\32\3\32\3\33\3\33\7\33\u0103\n\33\f\33\16\33\u0106") - buf.write("\13\33\3\33\3\33\3\34\6\34\u010b\n\34\r\34\16\34\u010c") - buf.write("\3\34\3\34\3\35\5\35\u0112\n\35\3\35\3\35\7\35\u0116\n") - buf.write("\35\f\35\16\35\u0119\13\35\3\36\3\36\3\37\3\37\3 \3 \3") - buf.write("!\3!\3!\3\"\3\"\3#\3#\3#\3$\3$\3$\3%\3%\3%\3&\3&\3&\3") - buf.write("&\3\'\3\'\3\'\3(\3(\3)\3)\3*\3*\3+\3+\3,\3,\3-\6-\u0141") - buf.write("\n-\r-\16-\u0142\3.\3.\3.\3.\3/\3/\3/\3/\7/\u014d\n/\f") - buf.write("/\16/\u0150\13/\3/\3/\3\60\3\60\7\60\u0156\n\60\f\60\16") - buf.write("\60\u0159\13\60\3\61\3\61\7\61\u015d\n\61\f\61\16\61\u0160") - buf.write("\13\61\3\62\3\62\3\62\3\62\7\62\u0166\n\62\f\62\16\62") - buf.write("\u0169\13\62\3\62\3\62\3\63\3\63\3\63\3\63\3\63\3\64\3") - buf.write("\64\3\64\3\64\3\64\3\64\3\65\3\65\3\66\3\66\3\67\3\67") - buf.write("\6\67\u017e\n\67\r\67\16\67\u017f\3\67\3\67\38\38\39\3") - buf.write("9\3:\3:\3;\5;\u018b\n;\3;\3;\3;\6;\u0190\n;\r;\16;\u0191") - buf.write("\5;\u0194\n;\3;\5;\u0197\n;\3<\3<\3<\7<\u019c\n<\f<\16") - buf.write("<\u019f\13<\5<\u01a1\n<\3=\3=\5=\u01a5\n=\3=\3=\3>\6>") - buf.write("\u01aa\n>\r>\16>\u01ab\3>\3>\3?\3?\3?\3?\3@\3@\3@\3@\4") - buf.write("\u014e\u0167\2A\4\5\6\6\b\7\n\b\f\t\16\n\20\13\22\f\24") - buf.write("\r\26\16\30\17\32\20\34\21\36\22 \23\"\24$\25&\26(\27") - buf.write("*\30,\31.\32\60\33\62\34\64\35\66\368\37: \"@#B$D%") - buf.write("F&H\'J(L)N*P+R,T-V.X/Z\60\\\61^\62`\63b\64d\65f\66h\67") - buf.write("j8l9n:p;rx\2z\2|?~@\u0080A\4\2\3\f\3\2\f\f\4\2\13") - buf.write("\13\"\"\3\2\62;\3\2c|\6\2\62;C\\aac|\3\2C\\\3\2\63;\4") - buf.write("\2GGgg\4\2--//\5\2\13\f\17\17\"\"\2\u01c7\2\4\3\2\2\2") + buf.write("\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\23\3\23\3\23") + buf.write("\3\23\3\23\3\23\3\23\3\23\3\23\3\24\3\24\3\24\3\24\3\24") + buf.write("\3\24\3\25\3\25\3\25\3\25\3\25\3\26\3\26\3\27\3\27\3\27") + 
buf.write("\3\27\3\27\3\27\3\27\3\30\3\30\3\30\3\30\3\30\3\31\3\31") + buf.write("\3\31\3\32\3\32\3\32\3\32\3\33\3\33\3\33\3\33\3\33\3\34") + buf.write("\3\34\3\34\3\34\3\34\3\34\3\35\3\35\3\35\3\36\3\36\3\36") + buf.write("\3\36\3\36\3\36\3\36\3\36\3\36\3\37\3\37\3\37\3\37\3\37") + buf.write("\3\37\3\37\3\37\3\37\3\37\3 \3 \3 \3 \3 \3 \3 \3!\3!\3") + buf.write('!\3!\3!\3!\3!\3"\3"\3"\3"\3#\3#\3#\3#\3#\3#\3#\3$') + buf.write("\3$\3$\3$\3$\3$\3$\3%\3%\3%\3%\3%\3%\3%\3%\3&\3&\3&\3") + buf.write("&\3&\3'\3'\3'\3'\3'\3'\3(\3(\3)\3)\3*\3*\3+\3+\3") + buf.write("+\3+\3,\3,\3-\3-\3.\3.\3/\3/\7/\u01d6\n/\f/\16/\u01d9") + buf.write("\13/\3/\3/\3\60\6\60\u01de\n\60\r\60\16\60\u01df\3\60") + buf.write("\3\60\3\61\5\61\u01e5\n\61\3\61\3\61\7\61\u01e9\n\61\f") + buf.write("\61\16\61\u01ec\13\61\3\62\3\62\3\63\3\63\3\64\3\64\3") + buf.write("\65\3\65\3\65\3\66\3\66\3\67\3\67\3\67\38\38\38\39\39") + buf.write("\39\3:\3:\3:\3:\3;\3;\3;\3<\3<\3=\3=\3>\3>\3?\3?\3@\3") + buf.write("@\3A\6A\u0214\nA\rA\16A\u0215\3B\3B\3B\3B\3C\3C\3C\3C") + buf.write("\7C\u0220\nC\fC\16C\u0223\13C\3C\3C\3D\3D\7D\u0229\nD") + buf.write("\fD\16D\u022c\13D\3E\3E\7E\u0230\nE\fE\16E\u0233\13E\3") + buf.write("F\3F\3F\3F\7F\u0239\nF\fF\16F\u023c\13F\3F\3F\3G\3G\3") + buf.write("G\3G\3G\3H\3H\3H\3H\3H\3H\3I\3I\3J\3J\3K\3K\6K\u0251\n") + buf.write("K\rK\16K\u0252\3K\3K\3L\3L\3M\3M\3N\3N\3O\5O\u025e\nO") + buf.write("\3O\3O\3O\6O\u0263\nO\rO\16O\u0264\5O\u0267\nO\3O\5O\u026a") + buf.write("\nO\3P\3P\3P\7P\u026f\nP\fP\16P\u0272\13P\5P\u0274\nP") + buf.write("\3Q\3Q\5Q\u0278\nQ\3Q\3Q\3R\6R\u027d\nR\rR\16R\u027e\3") + buf.write("R\3R\3S\3S\3S\3S\3T\3T\3T\3T\4\u0221\u023a\2U\4\5\6\6") + buf.write("\b\7\n\b\f\t\16\n\20\13\22\f\24\r\26\16\30\17\32\20\34") + buf.write('\21\36\22 \23"\24$\25&\26(\27*\30,\31.\32\60\33\62\34') + buf.write("\64\35\66\368\37: \"@#B$D%F&H'J(L)N*P+R,T-V.X/Z\60") + buf.write("\\\61^\62`\63b\64d\65f\66h\67j8l9n:p;rx?z@|A~B\u0080") + 
buf.write("C\u0082D\u0084E\u0086F\u0088G\u008aH\u008cI\u008eJ\u0090") + buf.write("K\u0092L\u0094M\u0096N\u0098O\u009aP\u009cQ\u009eR\u00a0") + buf.write("\2\u00a2\2\u00a4S\u00a6T\u00a8U\4\2\3\f\3\2\f\f\4\2\13") + buf.write('\13""\3\2\62;\3\2c|\6\2\62;C\\aac|\3\2C\\\3\2\63;\4') + buf.write('\2GGgg\4\2--//\5\2\13\f\17\17""\2\u029a\2\4\3\2\2\2') buf.write("\2\6\3\2\2\2\2\b\3\2\2\2\2\n\3\2\2\2\2\f\3\2\2\2\2\16") buf.write("\3\2\2\2\2\20\3\2\2\2\2\22\3\2\2\2\2\24\3\2\2\2\2\26\3") buf.write("\2\2\2\2\30\3\2\2\2\2\32\3\2\2\2\2\34\3\2\2\2\2\36\3\2") - buf.write("\2\2\2 \3\2\2\2\2\"\3\2\2\2\2$\3\2\2\2\2&\3\2\2\2\2(\3") + buf.write('\2\2\2 \3\2\2\2\2"\3\2\2\2\2$\3\2\2\2\2&\3\2\2\2\2(\3') buf.write("\2\2\2\2*\3\2\2\2\2,\3\2\2\2\2.\3\2\2\2\2\60\3\2\2\2\2") buf.write("\62\3\2\2\2\2\64\3\2\2\2\2\66\3\2\2\2\28\3\2\2\2\2:\3") buf.write("\2\2\2\2<\3\2\2\2\2>\3\2\2\2\2@\3\2\2\2\2B\3\2\2\2\2D") buf.write("\3\2\2\2\2F\3\2\2\2\2H\3\2\2\2\2J\3\2\2\2\2L\3\2\2\2\2") buf.write("N\3\2\2\2\2P\3\2\2\2\2R\3\2\2\2\2T\3\2\2\2\2V\3\2\2\2") buf.write("\2X\3\2\2\2\2Z\3\2\2\2\2\\\3\2\2\2\2^\3\2\2\2\2`\3\2\2") - buf.write("\2\2b\3\2\2\2\3d\3\2\2\2\3f\3\2\2\2\3h\3\2\2\2\3j\3\2") - buf.write("\2\2\3l\3\2\2\2\3n\3\2\2\2\3p\3\2\2\2\3r\3\2\2\2\3t\3") - buf.write("\2\2\2\3v\3\2\2\2\3|\3\2\2\2\3~\3\2\2\2\3\u0080\3\2\2") - buf.write("\2\4\u0082\3\2\2\2\6\u0089\3\2\2\2\b\u008e\3\2\2\2\n\u0091") - buf.write("\3\2\2\2\f\u0095\3\2\2\2\16\u009a\3\2\2\2\20\u00a0\3\2") - buf.write("\2\2\22\u00a3\3\2\2\2\24\u00ac\3\2\2\2\26\u00b6\3\2\2") - buf.write("\2\30\u00bd\3\2\2\2\32\u00c4\3\2\2\2\34\u00cb\3\2\2\2") - buf.write("\36\u00cf\3\2\2\2 \u00d6\3\2\2\2\"\u00dd\3\2\2\2$\u00e5") - buf.write("\3\2\2\2&\u00ea\3\2\2\2(\u00f0\3\2\2\2*\u00f2\3\2\2\2") - buf.write(",\u00f4\3\2\2\2.\u00f6\3\2\2\2\60\u00fa\3\2\2\2\62\u00fc") - buf.write("\3\2\2\2\64\u00fe\3\2\2\2\66\u0100\3\2\2\28\u010a\3\2") - buf.write("\2\2:\u0111\3\2\2\2<\u011a\3\2\2\2>\u011c\3\2\2\2@\u011e") - 
buf.write("\3\2\2\2B\u0120\3\2\2\2D\u0123\3\2\2\2F\u0125\3\2\2\2") - buf.write("H\u0128\3\2\2\2J\u012b\3\2\2\2L\u012e\3\2\2\2N\u0132\3") - buf.write("\2\2\2P\u0135\3\2\2\2R\u0137\3\2\2\2T\u0139\3\2\2\2V\u013b") - buf.write("\3\2\2\2X\u013d\3\2\2\2Z\u0140\3\2\2\2\\\u0144\3\2\2\2") - buf.write("^\u0148\3\2\2\2`\u0153\3\2\2\2b\u015a\3\2\2\2d\u0161\3") - buf.write("\2\2\2f\u016c\3\2\2\2h\u0171\3\2\2\2j\u0177\3\2\2\2l\u0179") - buf.write("\3\2\2\2n\u017b\3\2\2\2p\u0183\3\2\2\2r\u0185\3\2\2\2") - buf.write("t\u0187\3\2\2\2v\u018a\3\2\2\2x\u01a0\3\2\2\2z\u01a2\3") - buf.write("\2\2\2|\u01a9\3\2\2\2~\u01af\3\2\2\2\u0080\u01b3\3\2\2") - buf.write("\2\u0082\u0083\7U\2\2\u0083\u0084\7v\2\2\u0084\u0085\7") - buf.write("t\2\2\u0085\u0086\7w\2\2\u0086\u0087\7e\2\2\u0087\u0088") - buf.write("\7v\2\2\u0088\5\3\2\2\2\u0089\u008a\7V\2\2\u008a\u008b") - buf.write("\7c\2\2\u008b\u008c\7u\2\2\u008c\u008d\7m\2\2\u008d\7") - buf.write("\3\2\2\2\u008e\u008f\7K\2\2\u008f\u0090\7p\2\2\u0090\t") - buf.write("\3\2\2\2\u0091\u0092\7Q\2\2\u0092\u0093\7w\2\2\u0093\u0094") - buf.write("\7v\2\2\u0094\13\3\2\2\2\u0095\u0096\7N\2\2\u0096\u0097") - buf.write("\7q\2\2\u0097\u0098\7q\2\2\u0098\u0099\7r\2\2\u0099\r") - buf.write("\3\2\2\2\u009a\u009b\7Y\2\2\u009b\u009c\7j\2\2\u009c\u009d") - buf.write("\7k\2\2\u009d\u009e\7n\2\2\u009e\u009f\7g\2\2\u009f\17") - buf.write("\3\2\2\2\u00a0\u00a1\7V\2\2\u00a1\u00a2\7q\2\2\u00a2\21") - buf.write("\3\2\2\2\u00a3\u00a4\7R\2\2\u00a4\u00a5\7c\2\2\u00a5\u00a6") - buf.write("\7t\2\2\u00a6\u00a7\7c\2\2\u00a7\u00a8\7n\2\2\u00a8\u00a9") - buf.write("\7n\2\2\u00a9\u00aa\7g\2\2\u00aa\u00ab\7n\2\2\u00ab\23") - buf.write("\3\2\2\2\u00ac\u00ad\7E\2\2\u00ad\u00ae\7q\2\2\u00ae\u00af") - buf.write("\7p\2\2\u00af\u00b0\7f\2\2\u00b0\u00b1\7k\2\2\u00b1\u00b2") - buf.write("\7v\2\2\u00b2\u00b3\7k\2\2\u00b3\u00b4\7q\2\2\u00b4\u00b5") - buf.write("\7p\2\2\u00b5\25\3\2\2\2\u00b6\u00b7\7R\2\2\u00b7\u00b8") - buf.write("\7c\2\2\u00b8\u00b9\7u\2\2\u00b9\u00ba\7u\2\2\u00ba\u00bb") - 
buf.write("\7g\2\2\u00bb\u00bc\7f\2\2\u00bc\27\3\2\2\2\u00bd\u00be") - buf.write("\7H\2\2\u00be\u00bf\7c\2\2\u00bf\u00c0\7k\2\2\u00c0\u00c1") - buf.write("\7n\2\2\u00c1\u00c2\7g\2\2\u00c2\u00c3\7f\2\2\u00c3\31") - buf.write("\3\2\2\2\u00c4\u00c5\7Q\2\2\u00c5\u00c6\7p\2\2\u00c6\u00c7") - buf.write("\7F\2\2\u00c7\u00c8\7q\2\2\u00c8\u00c9\7p\2\2\u00c9\u00ca") - buf.write("\7g\2\2\u00ca\33\3\2\2\2\u00cb\u00cc\7G\2\2\u00cc\u00cd") - buf.write("\7p\2\2\u00cd\u00ce\7f\2\2\u00ce\35\3\2\2\2\u00cf\u00d0") - buf.write("\7p\2\2\u00d0\u00d1\7w\2\2\u00d1\u00d2\7o\2\2\u00d2\u00d3") - buf.write("\7d\2\2\u00d3\u00d4\7g\2\2\u00d4\u00d5\7t\2\2\u00d5\37") - buf.write("\3\2\2\2\u00d6\u00d7\7u\2\2\u00d7\u00d8\7v\2\2\u00d8\u00d9") - buf.write("\7t\2\2\u00d9\u00da\7k\2\2\u00da\u00db\7p\2\2\u00db\u00dc") - buf.write("\7i\2\2\u00dc!\3\2\2\2\u00dd\u00de\7d\2\2\u00de\u00df") - buf.write("\7q\2\2\u00df\u00e0\7q\2\2\u00e0\u00e1\7n\2\2\u00e1\u00e2") - buf.write("\7g\2\2\u00e2\u00e3\7c\2\2\u00e3\u00e4\7p\2\2\u00e4#\3") - buf.write("\2\2\2\u00e5\u00e6\7v\2\2\u00e6\u00e7\7t\2\2\u00e7\u00e8") - buf.write("\7w\2\2\u00e8\u00e9\7g\2\2\u00e9%\3\2\2\2\u00ea\u00eb") - buf.write("\7h\2\2\u00eb\u00ec\7c\2\2\u00ec\u00ed\7n\2\2\u00ed\u00ee") - buf.write("\7u\2\2\u00ee\u00ef\7g\2\2\u00ef\'\3\2\2\2\u00f0\u00f1") - buf.write("\7<\2\2\u00f1)\3\2\2\2\u00f2\u00f3\7\60\2\2\u00f3+\3\2") - buf.write("\2\2\u00f4\u00f5\7.\2\2\u00f5-\3\2\2\2\u00f6\u00f7\7}") - buf.write("\2\2\u00f7\u00f8\3\2\2\2\u00f8\u00f9\b\27\2\2\u00f9/\3") - buf.write("\2\2\2\u00fa\u00fb\7$\2\2\u00fb\61\3\2\2\2\u00fc\u00fd") - buf.write("\7]\2\2\u00fd\63\3\2\2\2\u00fe\u00ff\7_\2\2\u00ff\65\3") - buf.write("\2\2\2\u0100\u0104\7%\2\2\u0101\u0103\n\2\2\2\u0102\u0101") - buf.write("\3\2\2\2\u0103\u0106\3\2\2\2\u0104\u0102\3\2\2\2\u0104") - buf.write("\u0105\3\2\2\2\u0105\u0107\3\2\2\2\u0106\u0104\3\2\2\2") - buf.write("\u0107\u0108\b\33\3\2\u0108\67\3\2\2\2\u0109\u010b\t\3") - buf.write("\2\2\u010a\u0109\3\2\2\2\u010b\u010c\3\2\2\2\u010c\u010a") - 
buf.write("\3\2\2\2\u010c\u010d\3\2\2\2\u010d\u010e\3\2\2\2\u010e") - buf.write("\u010f\b\34\3\2\u010f9\3\2\2\2\u0110\u0112\7\17\2\2\u0111") - buf.write("\u0110\3\2\2\2\u0111\u0112\3\2\2\2\u0112\u0113\3\2\2\2") - buf.write("\u0113\u0117\7\f\2\2\u0114\u0116\7\"\2\2\u0115\u0114\3") - buf.write("\2\2\2\u0116\u0119\3\2\2\2\u0117\u0115\3\2\2\2\u0117\u0118") - buf.write("\3\2\2\2\u0118;\3\2\2\2\u0119\u0117\3\2\2\2\u011a\u011b") - buf.write("\7*\2\2\u011b=\3\2\2\2\u011c\u011d\7+\2\2\u011d?\3\2\2") - buf.write("\2\u011e\u011f\7>\2\2\u011fA\3\2\2\2\u0120\u0121\7>\2") - buf.write("\2\u0121\u0122\7?\2\2\u0122C\3\2\2\2\u0123\u0124\7@\2") - buf.write("\2\u0124E\3\2\2\2\u0125\u0126\7@\2\2\u0126\u0127\7?\2") - buf.write("\2\u0127G\3\2\2\2\u0128\u0129\7?\2\2\u0129\u012a\7?\2") - buf.write("\2\u012aI\3\2\2\2\u012b\u012c\7#\2\2\u012c\u012d\7?\2") - buf.write("\2\u012dK\3\2\2\2\u012e\u012f\7C\2\2\u012f\u0130\7p\2") - buf.write("\2\u0130\u0131\7f\2\2\u0131M\3\2\2\2\u0132\u0133\7Q\2") - buf.write("\2\u0133\u0134\7t\2\2\u0134O\3\2\2\2\u0135\u0136\7#\2") - buf.write("\2\u0136Q\3\2\2\2\u0137\u0138\7,\2\2\u0138S\3\2\2\2\u0139") - buf.write("\u013a\7\61\2\2\u013aU\3\2\2\2\u013b\u013c\7/\2\2\u013c") - buf.write("W\3\2\2\2\u013d\u013e\7-\2\2\u013eY\3\2\2\2\u013f\u0141") - buf.write("\t\4\2\2\u0140\u013f\3\2\2\2\u0141\u0142\3\2\2\2\u0142") - buf.write("\u0140\3\2\2\2\u0142\u0143\3\2\2\2\u0143[\3\2\2\2\u0144") - buf.write("\u0145\5Z-\2\u0145\u0146\7\60\2\2\u0146\u0147\5Z-\2\u0147") - buf.write("]\3\2\2\2\u0148\u014e\7$\2\2\u0149\u014a\7^\2\2\u014a") - buf.write("\u014d\7$\2\2\u014b\u014d\13\2\2\2\u014c\u0149\3\2\2\2") - buf.write("\u014c\u014b\3\2\2\2\u014d\u0150\3\2\2\2\u014e\u014f\3") - buf.write("\2\2\2\u014e\u014c\3\2\2\2\u014f\u0151\3\2\2\2\u0150\u014e") - buf.write("\3\2\2\2\u0151\u0152\7$\2\2\u0152_\3\2\2\2\u0153\u0157") - buf.write("\t\5\2\2\u0154\u0156\t\6\2\2\u0155\u0154\3\2\2\2\u0156") - buf.write("\u0159\3\2\2\2\u0157\u0155\3\2\2\2\u0157\u0158\3\2\2\2") - 
buf.write("\u0158a\3\2\2\2\u0159\u0157\3\2\2\2\u015a\u015e\t\7\2") - buf.write("\2\u015b\u015d\t\6\2\2\u015c\u015b\3\2\2\2\u015d\u0160") - buf.write("\3\2\2\2\u015e\u015c\3\2\2\2\u015e\u015f\3\2\2\2\u015f") - buf.write("c\3\2\2\2\u0160\u015e\3\2\2\2\u0161\u0167\7$\2\2\u0162") - buf.write("\u0163\7^\2\2\u0163\u0166\7$\2\2\u0164\u0166\13\2\2\2") - buf.write("\u0165\u0162\3\2\2\2\u0165\u0164\3\2\2\2\u0166\u0169\3") - buf.write("\2\2\2\u0167\u0168\3\2\2\2\u0167\u0165\3\2\2\2\u0168\u016a") - buf.write("\3\2\2\2\u0169\u0167\3\2\2\2\u016a\u016b\7$\2\2\u016b") - buf.write("e\3\2\2\2\u016c\u016d\7v\2\2\u016d\u016e\7t\2\2\u016e") - buf.write("\u016f\7w\2\2\u016f\u0170\7g\2\2\u0170g\3\2\2\2\u0171") - buf.write("\u0172\7h\2\2\u0172\u0173\7c\2\2\u0173\u0174\7n\2\2\u0174") - buf.write("\u0175\7u\2\2\u0175\u0176\7g\2\2\u0176i\3\2\2\2\u0177") - buf.write("\u0178\7<\2\2\u0178k\3\2\2\2\u0179\u017a\7$\2\2\u017a") - buf.write("m\3\2\2\2\u017b\u017d\7%\2\2\u017c\u017e\n\2\2\2\u017d") - buf.write("\u017c\3\2\2\2\u017e\u017f\3\2\2\2\u017f\u017d\3\2\2\2") - buf.write("\u017f\u0180\3\2\2\2\u0180\u0181\3\2\2\2\u0181\u0182\b") - buf.write("\67\3\2\u0182o\3\2\2\2\u0183\u0184\7]\2\2\u0184q\3\2\2") - buf.write("\2\u0185\u0186\7_\2\2\u0186s\3\2\2\2\u0187\u0188\7.\2") - buf.write("\2\u0188u\3\2\2\2\u0189\u018b\7/\2\2\u018a\u0189\3\2\2") - buf.write("\2\u018a\u018b\3\2\2\2\u018b\u018c\3\2\2\2\u018c\u0193") - buf.write("\5x<\2\u018d\u018f\7\60\2\2\u018e\u0190\t\4\2\2\u018f") - buf.write("\u018e\3\2\2\2\u0190\u0191\3\2\2\2\u0191\u018f\3\2\2\2") - buf.write("\u0191\u0192\3\2\2\2\u0192\u0194\3\2\2\2\u0193\u018d\3") - buf.write("\2\2\2\u0193\u0194\3\2\2\2\u0194\u0196\3\2\2\2\u0195\u0197") - buf.write("\5z=\2\u0196\u0195\3\2\2\2\u0196\u0197\3\2\2\2\u0197w") - buf.write("\3\2\2\2\u0198\u01a1\7\62\2\2\u0199\u019d\t\b\2\2\u019a") - buf.write("\u019c\t\4\2\2\u019b\u019a\3\2\2\2\u019c\u019f\3\2\2\2") - buf.write("\u019d\u019b\3\2\2\2\u019d\u019e\3\2\2\2\u019e\u01a1\3") - 
buf.write("\2\2\2\u019f\u019d\3\2\2\2\u01a0\u0198\3\2\2\2\u01a0\u0199") - buf.write("\3\2\2\2\u01a1y\3\2\2\2\u01a2\u01a4\t\t\2\2\u01a3\u01a5") - buf.write("\t\n\2\2\u01a4\u01a3\3\2\2\2\u01a4\u01a5\3\2\2\2\u01a5") - buf.write("\u01a6\3\2\2\2\u01a6\u01a7\5x<\2\u01a7{\3\2\2\2\u01a8") - buf.write("\u01aa\t\13\2\2\u01a9\u01a8\3\2\2\2\u01aa\u01ab\3\2\2") - buf.write("\2\u01ab\u01a9\3\2\2\2\u01ab\u01ac\3\2\2\2\u01ac\u01ad") - buf.write("\3\2\2\2\u01ad\u01ae\b>\3\2\u01ae}\3\2\2\2\u01af\u01b0") - buf.write("\7}\2\2\u01b0\u01b1\3\2\2\2\u01b1\u01b2\b?\2\2\u01b2\177") - buf.write("\3\2\2\2\u01b3\u01b4\7\177\2\2\u01b4\u01b5\3\2\2\2\u01b5") - buf.write("\u01b6\b@\4\2\u01b6\u0081\3\2\2\2\30\2\3\u0104\u010c\u0111") - buf.write("\u0117\u0142\u014c\u014e\u0157\u015e\u0165\u0167\u017f") - buf.write("\u018a\u0191\u0193\u0196\u019d\u01a0\u01a4\u01ab\5\7\3") - buf.write("\2\b\2\2\6\2\2") + buf.write("\2\2b\3\2\2\2\2d\3\2\2\2\2f\3\2\2\2\2h\3\2\2\2\2j\3\2") + buf.write("\2\2\2l\3\2\2\2\2n\3\2\2\2\2p\3\2\2\2\2r\3\2\2\2\2t\3") + buf.write("\2\2\2\2v\3\2\2\2\2x\3\2\2\2\2z\3\2\2\2\2|\3\2\2\2\2~") + buf.write("\3\2\2\2\2\u0080\3\2\2\2\2\u0082\3\2\2\2\2\u0084\3\2\2") + buf.write("\2\2\u0086\3\2\2\2\2\u0088\3\2\2\2\2\u008a\3\2\2\2\3\u008c") + buf.write("\3\2\2\2\3\u008e\3\2\2\2\3\u0090\3\2\2\2\3\u0092\3\2\2") + buf.write("\2\3\u0094\3\2\2\2\3\u0096\3\2\2\2\3\u0098\3\2\2\2\3\u009a") + buf.write("\3\2\2\2\3\u009c\3\2\2\2\3\u009e\3\2\2\2\3\u00a4\3\2\2") + buf.write("\2\3\u00a6\3\2\2\2\3\u00a8\3\2\2\2\4\u00aa\3\2\2\2\6\u00af") + buf.write("\3\2\2\2\b\u00b6\3\2\2\2\n\u00bd\3\2\2\2\f\u00c7\3\2\2") + buf.write("\2\16\u00cc\3\2\2\2\20\u00d3\3\2\2\2\22\u00d8\3\2\2\2") + buf.write("\24\u00db\3\2\2\2\26\u00e2\3\2\2\2\30\u00e9\3\2\2\2\32") + buf.write("\u00fc\3\2\2\2\34\u010a\3\2\2\2\36\u011a\3\2\2\2 \u0126") + buf.write('\3\2\2\2"\u0131\3\2\2\2$\u013b\3\2\2\2&\u0146\3\2\2\2') + buf.write("(\u014f\3\2\2\2*\u0155\3\2\2\2,\u015a\3\2\2\2.\u015c\3") + 
buf.write("\2\2\2\60\u0163\3\2\2\2\62\u0168\3\2\2\2\64\u016b\3\2") + buf.write("\2\2\66\u016f\3\2\2\28\u0174\3\2\2\2:\u017a\3\2\2\2<\u017d") + buf.write("\3\2\2\2>\u0186\3\2\2\2@\u0190\3\2\2\2B\u0197\3\2\2\2") + buf.write("D\u019e\3\2\2\2F\u01a2\3\2\2\2H\u01a9\3\2\2\2J\u01b0\3") + buf.write("\2\2\2L\u01b8\3\2\2\2N\u01bd\3\2\2\2P\u01c3\3\2\2\2R\u01c5") + buf.write("\3\2\2\2T\u01c7\3\2\2\2V\u01c9\3\2\2\2X\u01cd\3\2\2\2") + buf.write("Z\u01cf\3\2\2\2\\\u01d1\3\2\2\2^\u01d3\3\2\2\2`\u01dd") + buf.write("\3\2\2\2b\u01e4\3\2\2\2d\u01ed\3\2\2\2f\u01ef\3\2\2\2") + buf.write("h\u01f1\3\2\2\2j\u01f3\3\2\2\2l\u01f6\3\2\2\2n\u01f8\3") + buf.write("\2\2\2p\u01fb\3\2\2\2r\u01fe\3\2\2\2t\u0201\3\2\2\2v\u0205") + buf.write("\3\2\2\2x\u0208\3\2\2\2z\u020a\3\2\2\2|\u020c\3\2\2\2") + buf.write("~\u020e\3\2\2\2\u0080\u0210\3\2\2\2\u0082\u0213\3\2\2") + buf.write("\2\u0084\u0217\3\2\2\2\u0086\u021b\3\2\2\2\u0088\u0226") + buf.write("\3\2\2\2\u008a\u022d\3\2\2\2\u008c\u0234\3\2\2\2\u008e") + buf.write("\u023f\3\2\2\2\u0090\u0244\3\2\2\2\u0092\u024a\3\2\2\2") + buf.write("\u0094\u024c\3\2\2\2\u0096\u024e\3\2\2\2\u0098\u0256\3") + buf.write("\2\2\2\u009a\u0258\3\2\2\2\u009c\u025a\3\2\2\2\u009e\u025d") + buf.write("\3\2\2\2\u00a0\u0273\3\2\2\2\u00a2\u0275\3\2\2\2\u00a4") + buf.write("\u027c\3\2\2\2\u00a6\u0282\3\2\2\2\u00a8\u0286\3\2\2\2") + buf.write("\u00aa\u00ab\7T\2\2\u00ab\u00ac\7w\2\2\u00ac\u00ad\7n") + buf.write("\2\2\u00ad\u00ae\7g\2\2\u00ae\5\3\2\2\2\u00af\u00b0\7") + buf.write("O\2\2\u00b0\u00b1\7q\2\2\u00b1\u00b2\7f\2\2\u00b2\u00b3") + buf.write("\7w\2\2\u00b3\u00b4\7n\2\2\u00b4\u00b5\7g\2\2\u00b5\7") + buf.write("\3\2\2\2\u00b6\u00b7\7K\2\2\u00b7\u00b8\7o\2\2\u00b8\u00b9") + buf.write("\7r\2\2\u00b9\u00ba\7q\2\2\u00ba\u00bb\7t\2\2\u00bb\u00bc") + buf.write("\7v\2\2\u00bc\t\3\2\2\2\u00bd\u00be\7V\2\2\u00be\u00bf") + buf.write("\7t\2\2\u00bf\u00c0\7c\2\2\u00c0\u00c1\7p\2\2\u00c1\u00c2") + buf.write("\7u\2\2\u00c2\u00c3\7r\2\2\u00c3\u00c4\7q\2\2\u00c4\u00c5") + 
buf.write("\7t\2\2\u00c5\u00c6\7v\2\2\u00c6\13\3\2\2\2\u00c7\u00c8") + buf.write("\7O\2\2\u00c8\u00c9\7q\2\2\u00c9\u00ca\7x\2\2\u00ca\u00cb") + buf.write("\7g\2\2\u00cb\r\3\2\2\2\u00cc\u00cd\7C\2\2\u00cd\u00ce") + buf.write("\7e\2\2\u00ce\u00cf\7v\2\2\u00cf\u00d0\7k\2\2\u00d0\u00d1") + buf.write("\7q\2\2\u00d1\u00d2\7p\2\2\u00d2\17\3\2\2\2\u00d3\u00d4") + buf.write("\7H\2\2\u00d4\u00d5\7t\2\2\u00d5\u00d6\7q\2\2\u00d6\u00d7") + buf.write("\7o\2\2\u00d7\21\3\2\2\2\u00d8\u00d9\7F\2\2\u00d9\u00da") + buf.write("\7q\2\2\u00da\23\3\2\2\2\u00db\u00dc\7T\2\2\u00dc\u00dd") + buf.write("\7g\2\2\u00dd\u00de\7r\2\2\u00de\u00df\7g\2\2\u00df\u00e0") + buf.write("\7c\2\2\u00e0\u00e1\7v\2\2\u00e1\25\3\2\2\2\u00e2\u00e3") + buf.write("\7Q\2\2\u00e3\u00e4\7p\2\2\u00e4\u00e5\7F\2\2\u00e5\u00e6") + buf.write("\7q\2\2\u00e6\u00e7\7p\2\2\u00e7\u00e8\7g\2\2\u00e8\27") + buf.write("\3\2\2\2\u00e9\u00ea\7V\2\2\u00ea\u00eb\7t\2\2\u00eb\u00ec") + buf.write("\7c\2\2\u00ec\u00ed\7p\2\2\u00ed\u00ee\7u\2\2\u00ee\u00ef") + buf.write("\7r\2\2\u00ef\u00f0\7q\2\2\u00f0\u00f1\7t\2\2\u00f1\u00f2") + buf.write("\7v\2\2\u00f2\u00f3\7Q\2\2\u00f3\u00f4\7t\2\2\u00f4\u00f5") + buf.write("\7f\2\2\u00f5\u00f6\7g\2\2\u00f6\u00f7\7t\2\2\u00f7\u00f8") + buf.write("\7U\2\2\u00f8\u00f9\7v\2\2\u00f9\u00fa\7g\2\2\u00fa\u00fb") + buf.write("\7r\2\2\u00fb\31\3\2\2\2\u00fc\u00fd\7O\2\2\u00fd\u00fe") + buf.write("\7q\2\2\u00fe\u00ff\7x\2\2\u00ff\u0100\7g\2\2\u0100\u0101") + buf.write("\7Q\2\2\u0101\u0102\7t\2\2\u0102\u0103\7f\2\2\u0103\u0104") + buf.write("\7g\2\2\u0104\u0105\7t\2\2\u0105\u0106\7U\2\2\u0106\u0107") + buf.write("\7v\2\2\u0107\u0108\7g\2\2\u0108\u0109\7r\2\2\u0109\33") + buf.write("\3\2\2\2\u010a\u010b\7C\2\2\u010b\u010c\7e\2\2\u010c\u010d") + buf.write("\7v\2\2\u010d\u010e\7k\2\2\u010e\u010f\7q\2\2\u010f\u0110") + buf.write("\7p\2\2\u0110\u0111\7Q\2\2\u0111\u0112\7t\2\2\u0112\u0113") + buf.write("\7f\2\2\u0113\u0114\7g\2\2\u0114\u0115\7t\2\2\u0115\u0116") + 
buf.write("\7U\2\2\u0116\u0117\7v\2\2\u0117\u0118\7g\2\2\u0118\u0119") + buf.write("\7r\2\2\u0119\35\3\2\2\2\u011a\u011b\7E\2\2\u011b\u011c") + buf.write("\7q\2\2\u011c\u011d\7p\2\2\u011d\u011e\7u\2\2\u011e\u011f") + buf.write("\7v\2\2\u011f\u0120\7t\2\2\u0120\u0121\7c\2\2\u0121\u0122") + buf.write("\7k\2\2\u0122\u0123\7p\2\2\u0123\u0124\7v\2\2\u0124\u0125") + buf.write("\7u\2\2\u0125\37\3\2\2\2\u0126\u0127\7R\2\2\u0127\u0128") + buf.write("\7c\2\2\u0128\u0129\7t\2\2\u0129\u012a\7c\2\2\u012a\u012b") + buf.write("\7o\2\2\u012b\u012c\7g\2\2\u012c\u012d\7v\2\2\u012d\u012e") + buf.write("\7g\2\2\u012e\u012f\7t\2\2\u012f\u0130\7u\2\2\u0130!\3") + buf.write("\2\2\2\u0131\u0132\7U\2\2\u0132\u0133\7v\2\2\u0133\u0134") + buf.write("\7c\2\2\u0134\u0135\7t\2\2\u0135\u0136\7v\2\2\u0136\u0137") + buf.write("\7g\2\2\u0137\u0138\7f\2\2\u0138\u0139\7D\2\2\u0139\u013a") + buf.write("\7{\2\2\u013a#\3\2\2\2\u013b\u013c\7H\2\2\u013c\u013d") + buf.write("\7k\2\2\u013d\u013e\7p\2\2\u013e\u013f\7k\2\2\u013f\u0140") + buf.write("\7u\2\2\u0140\u0141\7j\2\2\u0141\u0142\7g\2\2\u0142\u0143") + buf.write("\7f\2\2\u0143\u0144\7D\2\2\u0144\u0145\7{\2\2\u0145%\3") + buf.write("\2\2\2\u0146\u0147\7N\2\2\u0147\u0148\7q\2\2\u0148\u0149") + buf.write("\7e\2\2\u0149\u014a\7c\2\2\u014a\u014b\7v\2\2\u014b\u014c") + buf.write("\7k\2\2\u014c\u014d\7q\2\2\u014d\u014e\7p\2\2\u014e'") + buf.write("\3\2\2\2\u014f\u0150\7G\2\2\u0150\u0151\7x\2\2\u0151\u0152") + buf.write("\7g\2\2\u0152\u0153\7p\2\2\u0153\u0154\7v\2\2\u0154)\3") + buf.write("\2\2\2\u0155\u0156\7V\2\2\u0156\u0157\7k\2\2\u0157\u0158") + buf.write("\7o\2\2\u0158\u0159\7g\2\2\u0159+\3\2\2\2\u015a\u015b") + buf.write("\7?\2\2\u015b-\3\2\2\2\u015c\u015d\7U\2\2\u015d\u015e") + buf.write("\7v\2\2\u015e\u015f\7t\2\2\u015f\u0160\7w\2\2\u0160\u0161") + buf.write("\7e\2\2\u0161\u0162\7v\2\2\u0162/\3\2\2\2\u0163\u0164") + buf.write("\7V\2\2\u0164\u0165\7c\2\2\u0165\u0166\7u\2\2\u0166\u0167") + 
buf.write("\7m\2\2\u0167\61\3\2\2\2\u0168\u0169\7K\2\2\u0169\u016a") + buf.write("\7p\2\2\u016a\63\3\2\2\2\u016b\u016c\7Q\2\2\u016c\u016d") + buf.write("\7w\2\2\u016d\u016e\7v\2\2\u016e\65\3\2\2\2\u016f\u0170") + buf.write("\7N\2\2\u0170\u0171\7q\2\2\u0171\u0172\7q\2\2\u0172\u0173") + buf.write("\7r\2\2\u0173\67\3\2\2\2\u0174\u0175\7Y\2\2\u0175\u0176") + buf.write("\7j\2\2\u0176\u0177\7k\2\2\u0177\u0178\7n\2\2\u0178\u0179") + buf.write("\7g\2\2\u01799\3\2\2\2\u017a\u017b\7V\2\2\u017b\u017c") + buf.write("\7q\2\2\u017c;\3\2\2\2\u017d\u017e\7R\2\2\u017e\u017f") + buf.write("\7c\2\2\u017f\u0180\7t\2\2\u0180\u0181\7c\2\2\u0181\u0182") + buf.write("\7n\2\2\u0182\u0183\7n\2\2\u0183\u0184\7g\2\2\u0184\u0185") + buf.write("\7n\2\2\u0185=\3\2\2\2\u0186\u0187\7E\2\2\u0187\u0188") + buf.write("\7q\2\2\u0188\u0189\7p\2\2\u0189\u018a\7f\2\2\u018a\u018b") + buf.write("\7k\2\2\u018b\u018c\7v\2\2\u018c\u018d\7k\2\2\u018d\u018e") + buf.write("\7q\2\2\u018e\u018f\7p\2\2\u018f?\3\2\2\2\u0190\u0191") + buf.write("\7R\2\2\u0191\u0192\7c\2\2\u0192\u0193\7u\2\2\u0193\u0194") + buf.write("\7u\2\2\u0194\u0195\7g\2\2\u0195\u0196\7f\2\2\u0196A\3") + buf.write("\2\2\2\u0197\u0198\7H\2\2\u0198\u0199\7c\2\2\u0199\u019a") + buf.write("\7k\2\2\u019a\u019b\7n\2\2\u019b\u019c\7g\2\2\u019c\u019d") + buf.write("\7f\2\2\u019dC\3\2\2\2\u019e\u019f\7G\2\2\u019f\u01a0") + buf.write("\7p\2\2\u01a0\u01a1\7f\2\2\u01a1E\3\2\2\2\u01a2\u01a3") + buf.write("\7p\2\2\u01a3\u01a4\7w\2\2\u01a4\u01a5\7o\2\2\u01a5\u01a6") + buf.write("\7d\2\2\u01a6\u01a7\7g\2\2\u01a7\u01a8\7t\2\2\u01a8G\3") + buf.write("\2\2\2\u01a9\u01aa\7u\2\2\u01aa\u01ab\7v\2\2\u01ab\u01ac") + buf.write("\7t\2\2\u01ac\u01ad\7k\2\2\u01ad\u01ae\7p\2\2\u01ae\u01af") + buf.write("\7i\2\2\u01afI\3\2\2\2\u01b0\u01b1\7d\2\2\u01b1\u01b2") + buf.write("\7q\2\2\u01b2\u01b3\7q\2\2\u01b3\u01b4\7n\2\2\u01b4\u01b5") + buf.write("\7g\2\2\u01b5\u01b6\7c\2\2\u01b6\u01b7\7p\2\2\u01b7K\3") + buf.write("\2\2\2\u01b8\u01b9\7v\2\2\u01b9\u01ba\7t\2\2\u01ba\u01bb") + 
buf.write("\7w\2\2\u01bb\u01bc\7g\2\2\u01bcM\3\2\2\2\u01bd\u01be") + buf.write("\7h\2\2\u01be\u01bf\7c\2\2\u01bf\u01c0\7n\2\2\u01c0\u01c1") + buf.write("\7u\2\2\u01c1\u01c2\7g\2\2\u01c2O\3\2\2\2\u01c3\u01c4") + buf.write("\7<\2\2\u01c4Q\3\2\2\2\u01c5\u01c6\7\60\2\2\u01c6S\3\2") + buf.write("\2\2\u01c7\u01c8\7.\2\2\u01c8U\3\2\2\2\u01c9\u01ca\7}") + buf.write("\2\2\u01ca\u01cb\3\2\2\2\u01cb\u01cc\b+\2\2\u01ccW\3\2") + buf.write("\2\2\u01cd\u01ce\7$\2\2\u01ceY\3\2\2\2\u01cf\u01d0\7]") + buf.write("\2\2\u01d0[\3\2\2\2\u01d1\u01d2\7_\2\2\u01d2]\3\2\2\2") + buf.write("\u01d3\u01d7\7%\2\2\u01d4\u01d6\n\2\2\2\u01d5\u01d4\3") + buf.write("\2\2\2\u01d6\u01d9\3\2\2\2\u01d7\u01d5\3\2\2\2\u01d7\u01d8") + buf.write("\3\2\2\2\u01d8\u01da\3\2\2\2\u01d9\u01d7\3\2\2\2\u01da") + buf.write("\u01db\b/\3\2\u01db_\3\2\2\2\u01dc\u01de\t\3\2\2\u01dd") + buf.write("\u01dc\3\2\2\2\u01de\u01df\3\2\2\2\u01df\u01dd\3\2\2\2") + buf.write("\u01df\u01e0\3\2\2\2\u01e0\u01e1\3\2\2\2\u01e1\u01e2\b") + buf.write("\60\3\2\u01e2a\3\2\2\2\u01e3\u01e5\7\17\2\2\u01e4\u01e3") + buf.write("\3\2\2\2\u01e4\u01e5\3\2\2\2\u01e5\u01e6\3\2\2\2\u01e6") + buf.write('\u01ea\7\f\2\2\u01e7\u01e9\7"\2\2\u01e8\u01e7\3\2\2\2') + buf.write("\u01e9\u01ec\3\2\2\2\u01ea\u01e8\3\2\2\2\u01ea\u01eb\3") + buf.write("\2\2\2\u01ebc\3\2\2\2\u01ec\u01ea\3\2\2\2\u01ed\u01ee") + buf.write("\7*\2\2\u01eee\3\2\2\2\u01ef\u01f0\7+\2\2\u01f0g\3\2\2") + buf.write("\2\u01f1\u01f2\7>\2\2\u01f2i\3\2\2\2\u01f3\u01f4\7>\2") + buf.write("\2\u01f4\u01f5\7?\2\2\u01f5k\3\2\2\2\u01f6\u01f7\7@\2") + buf.write("\2\u01f7m\3\2\2\2\u01f8\u01f9\7@\2\2\u01f9\u01fa\7?\2") + buf.write("\2\u01fao\3\2\2\2\u01fb\u01fc\7?\2\2\u01fc\u01fd\7?\2") + buf.write("\2\u01fdq\3\2\2\2\u01fe\u01ff\7#\2\2\u01ff\u0200\7?\2") + buf.write("\2\u0200s\3\2\2\2\u0201\u0202\7C\2\2\u0202\u0203\7p\2") + buf.write("\2\u0203\u0204\7f\2\2\u0204u\3\2\2\2\u0205\u0206\7Q\2") + buf.write("\2\u0206\u0207\7t\2\2\u0207w\3\2\2\2\u0208\u0209\7#\2") + 
buf.write("\2\u0209y\3\2\2\2\u020a\u020b\7,\2\2\u020b{\3\2\2\2\u020c") + buf.write("\u020d\7\61\2\2\u020d}\3\2\2\2\u020e\u020f\7/\2\2\u020f") + buf.write("\177\3\2\2\2\u0210\u0211\7-\2\2\u0211\u0081\3\2\2\2\u0212") + buf.write("\u0214\t\4\2\2\u0213\u0212\3\2\2\2\u0214\u0215\3\2\2\2") + buf.write("\u0215\u0213\3\2\2\2\u0215\u0216\3\2\2\2\u0216\u0083\3") + buf.write("\2\2\2\u0217\u0218\5\u0082A\2\u0218\u0219\7\60\2\2\u0219") + buf.write("\u021a\5\u0082A\2\u021a\u0085\3\2\2\2\u021b\u0221\7$\2") + buf.write("\2\u021c\u021d\7^\2\2\u021d\u0220\7$\2\2\u021e\u0220\13") + buf.write("\2\2\2\u021f\u021c\3\2\2\2\u021f\u021e\3\2\2\2\u0220\u0223") + buf.write("\3\2\2\2\u0221\u0222\3\2\2\2\u0221\u021f\3\2\2\2\u0222") + buf.write("\u0224\3\2\2\2\u0223\u0221\3\2\2\2\u0224\u0225\7$\2\2") + buf.write("\u0225\u0087\3\2\2\2\u0226\u022a\t\5\2\2\u0227\u0229\t") + buf.write("\6\2\2\u0228\u0227\3\2\2\2\u0229\u022c\3\2\2\2\u022a\u0228") + buf.write("\3\2\2\2\u022a\u022b\3\2\2\2\u022b\u0089\3\2\2\2\u022c") + buf.write("\u022a\3\2\2\2\u022d\u0231\t\7\2\2\u022e\u0230\t\6\2\2") + buf.write("\u022f\u022e\3\2\2\2\u0230\u0233\3\2\2\2\u0231\u022f\3") + buf.write("\2\2\2\u0231\u0232\3\2\2\2\u0232\u008b\3\2\2\2\u0233\u0231") + buf.write("\3\2\2\2\u0234\u023a\7$\2\2\u0235\u0236\7^\2\2\u0236\u0239") + buf.write("\7$\2\2\u0237\u0239\13\2\2\2\u0238\u0235\3\2\2\2\u0238") + buf.write("\u0237\3\2\2\2\u0239\u023c\3\2\2\2\u023a\u023b\3\2\2\2") + buf.write("\u023a\u0238\3\2\2\2\u023b\u023d\3\2\2\2\u023c\u023a\3") + buf.write("\2\2\2\u023d\u023e\7$\2\2\u023e\u008d\3\2\2\2\u023f\u0240") + buf.write("\7v\2\2\u0240\u0241\7t\2\2\u0241\u0242\7w\2\2\u0242\u0243") + buf.write("\7g\2\2\u0243\u008f\3\2\2\2\u0244\u0245\7h\2\2\u0245\u0246") + buf.write("\7c\2\2\u0246\u0247\7n\2\2\u0247\u0248\7u\2\2\u0248\u0249") + buf.write("\7g\2\2\u0249\u0091\3\2\2\2\u024a\u024b\7<\2\2\u024b\u0093") + buf.write("\3\2\2\2\u024c\u024d\7$\2\2\u024d\u0095\3\2\2\2\u024e") + buf.write("\u0250\7%\2\2\u024f\u0251\n\2\2\2\u0250\u024f\3\2\2\2") + 
buf.write("\u0251\u0252\3\2\2\2\u0252\u0250\3\2\2\2\u0252\u0253\3") + buf.write("\2\2\2\u0253\u0254\3\2\2\2\u0254\u0255\bK\3\2\u0255\u0097") + buf.write("\3\2\2\2\u0256\u0257\7]\2\2\u0257\u0099\3\2\2\2\u0258") + buf.write("\u0259\7_\2\2\u0259\u009b\3\2\2\2\u025a\u025b\7.\2\2\u025b") + buf.write("\u009d\3\2\2\2\u025c\u025e\7/\2\2\u025d\u025c\3\2\2\2") + buf.write("\u025d\u025e\3\2\2\2\u025e\u025f\3\2\2\2\u025f\u0266\5") + buf.write("\u00a0P\2\u0260\u0262\7\60\2\2\u0261\u0263\t\4\2\2\u0262") + buf.write("\u0261\3\2\2\2\u0263\u0264\3\2\2\2\u0264\u0262\3\2\2\2") + buf.write("\u0264\u0265\3\2\2\2\u0265\u0267\3\2\2\2\u0266\u0260\3") + buf.write("\2\2\2\u0266\u0267\3\2\2\2\u0267\u0269\3\2\2\2\u0268\u026a") + buf.write("\5\u00a2Q\2\u0269\u0268\3\2\2\2\u0269\u026a\3\2\2\2\u026a") + buf.write("\u009f\3\2\2\2\u026b\u0274\7\62\2\2\u026c\u0270\t\b\2") + buf.write("\2\u026d\u026f\t\4\2\2\u026e\u026d\3\2\2\2\u026f\u0272") + buf.write("\3\2\2\2\u0270\u026e\3\2\2\2\u0270\u0271\3\2\2\2\u0271") + buf.write("\u0274\3\2\2\2\u0272\u0270\3\2\2\2\u0273\u026b\3\2\2\2") + buf.write("\u0273\u026c\3\2\2\2\u0274\u00a1\3\2\2\2\u0275\u0277\t") + buf.write("\t\2\2\u0276\u0278\t\n\2\2\u0277\u0276\3\2\2\2\u0277\u0278") + buf.write("\3\2\2\2\u0278\u0279\3\2\2\2\u0279\u027a\5\u00a0P\2\u027a") + buf.write("\u00a3\3\2\2\2\u027b\u027d\t\13\2\2\u027c\u027b\3\2\2") + buf.write("\2\u027d\u027e\3\2\2\2\u027e\u027c\3\2\2\2\u027e\u027f") + buf.write("\3\2\2\2\u027f\u0280\3\2\2\2\u0280\u0281\bR\3\2\u0281") + buf.write("\u00a5\3\2\2\2\u0282\u0283\7}\2\2\u0283\u0284\3\2\2\2") + buf.write("\u0284\u0285\bS\2\2\u0285\u00a7\3\2\2\2\u0286\u0287\7") + buf.write("\177\2\2\u0287\u0288\3\2\2\2\u0288\u0289\bT\4\2\u0289") + buf.write("\u00a9\3\2\2\2\30\2\3\u01d7\u01df\u01e4\u01ea\u0215\u021f") + buf.write("\u0221\u022a\u0231\u0238\u023a\u0252\u025d\u0264\u0266") + buf.write("\u0269\u0270\u0273\u0277\u027e\5\7\3\2\b\2\2\6\2\2") return buf.getvalue() @@ -214,123 +301,339 @@ class PFDLLexer(Lexer): atn = 
ATNDeserializer().deserialize(serializedATN()) - decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ] + decisionsToDFA = [DFA(ds, i) for i, ds in enumerate(atn.decisionToState)] JSON = 1 INDENT = 1 DEDENT = 2 - STRUCT = 3 - TASK = 4 - IN = 5 - OUT = 6 - LOOP = 7 - WHILE = 8 - TO = 9 - PARALLEL = 10 - CONDITION = 11 - PASSED = 12 - FAILED = 13 - ON_DONE = 14 - END = 15 - NUMBER_P = 16 - STRING_P = 17 - BOOLEAN_P = 18 - TRUE = 19 - FALSE = 20 - COLON = 21 - DOT = 22 - COMMA = 23 - JSON_OPEN = 24 - QUOTE = 25 - ARRAY_LEFT = 26 - ARRAY_RIGHT = 27 - COMMENT = 28 - WHITESPACE = 29 - NL = 30 - LEFT_PARENTHESIS = 31 - RIGHT_PARENTHESIS = 32 - LESS_THAN = 33 - LESS_THAN_OR_EQUAL = 34 - GREATER_THAN = 35 - GREATER_THAN_OR_EQUAL = 36 - EQUAL = 37 - NOT_EQUAL = 38 - BOOLEAN_AND = 39 - BOOLEAN_OR = 40 - BOOLEAN_NOT = 41 - STAR = 42 - SLASH = 43 - MINUS = 44 - PLUS = 45 - INTEGER = 46 - FLOAT = 47 - STRING = 48 - STARTS_WITH_LOWER_C_STR = 49 - STARTS_WITH_UPPER_C_STR = 50 - JSON_STRING = 51 - JSON_TRUE = 52 - JSON_FALSE = 53 - JSON_COLON = 54 - JSON_QUOTE = 55 - JSON_COMMENT = 56 - JSON_ARRAY_LEFT = 57 - JSON_ARRAY_RIGHT = 58 - JSON_COMMA = 59 - NUMBER = 60 - WS = 61 - JSON_OPEN_2 = 62 - JSON_CLOSE = 63 + RULE = 3 + MODULE = 4 + IMPORT = 5 + TRANSPORT = 6 + MOVE = 7 + ACTION = 8 + FROM = 9 + DO = 10 + REPEAT = 11 + ON_DONE = 12 + TRANSPORT_ORDER_STEP = 13 + MOVE_ORDER_STEP = 14 + ACTION_ORDER_STEP = 15 + CONSTRAINTS = 16 + PARAMETERS = 17 + STARTED_BY = 18 + FINISHED_BY = 19 + LOCATION = 20 + EVENT = 21 + TIME = 22 + ASSIGNMENT = 23 + STRUCT = 24 + TASK = 25 + IN = 26 + OUT = 27 + LOOP = 28 + WHILE = 29 + TO = 30 + PARALLEL = 31 + CONDITION = 32 + PASSED = 33 + FAILED = 34 + END = 35 + NUMBER_P = 36 + STRING_P = 37 + BOOLEAN_P = 38 + TRUE = 39 + FALSE = 40 + COLON = 41 + DOT = 42 + COMMA = 43 + JSON_OPEN = 44 + QUOTE = 45 + ARRAY_LEFT = 46 + ARRAY_RIGHT = 47 + COMMENT = 48 + WHITESPACE = 49 + NL = 50 + LEFT_PARENTHESIS = 51 + RIGHT_PARENTHESIS = 52 + 
LESS_THAN = 53 + LESS_THAN_OR_EQUAL = 54 + GREATER_THAN = 55 + GREATER_THAN_OR_EQUAL = 56 + EQUAL = 57 + NOT_EQUAL = 58 + BOOLEAN_AND = 59 + BOOLEAN_OR = 60 + BOOLEAN_NOT = 61 + STAR = 62 + SLASH = 63 + MINUS = 64 + PLUS = 65 + INTEGER = 66 + FLOAT = 67 + STRING = 68 + STARTS_WITH_LOWER_C_STR = 69 + STARTS_WITH_UPPER_C_STR = 70 + JSON_STRING = 71 + JSON_TRUE = 72 + JSON_FALSE = 73 + JSON_COLON = 74 + JSON_QUOTE = 75 + JSON_COMMENT = 76 + JSON_ARRAY_LEFT = 77 + JSON_ARRAY_RIGHT = 78 + JSON_COMMA = 79 + NUMBER = 80 + WS = 81 + JSON_OPEN_2 = 82 + JSON_CLOSE = 83 - channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ] + channelNames = ["DEFAULT_TOKEN_CHANNEL", "HIDDEN"] - modeNames = [ "DEFAULT_MODE", "JSON" ] + modeNames = ["DEFAULT_MODE", "JSON"] - literalNames = [ "", - "'Struct'", "'Task'", "'In'", "'Out'", "'Loop'", "'While'", - "'To'", "'Parallel'", "'Condition'", "'Passed'", "'Failed'", - "'OnDone'", "'End'", "'number'", "'string'", "'boolean'", "'.'", - "'('", "')'", "'<'", "'<='", "'>'", "'>='", "'=='", "'!='", - "'And'", "'Or'", "'!'", "'*'", "'/'", "'-'", "'+'", "'}'" ] + literalNames = [ + "", + "'Rule'", + "'Module'", + "'Import'", + "'Transport'", + "'Move'", + "'Action'", + "'From'", + "'Do'", + "'Repeat'", + "'OnDone'", + "'TransportOrderStep'", + "'MoveOrderStep'", + "'ActionOrderStep'", + "'Constraints'", + "'Parameters'", + "'StartedBy'", + "'FinishedBy'", + "'Location'", + "'Event'", + "'Time'", + "'='", + "'Struct'", + "'Task'", + "'In'", + "'Out'", + "'Loop'", + "'While'", + "'To'", + "'Parallel'", + "'Condition'", + "'Passed'", + "'Failed'", + "'End'", + "'number'", + "'string'", + "'boolean'", + "'.'", + "'('", + "')'", + "'<'", + "'<='", + "'>'", + "'>='", + "'=='", + "'!='", + "'And'", + "'Or'", + "'!'", + "'*'", + "'/'", + "'-'", + "'+'", + "'}'", + ] - symbolicNames = [ "", - "INDENT", "DEDENT", "STRUCT", "TASK", "IN", "OUT", "LOOP", "WHILE", - "TO", "PARALLEL", "CONDITION", "PASSED", "FAILED", "ON_DONE", - "END", "NUMBER_P", "STRING_P", 
"BOOLEAN_P", "TRUE", "FALSE", - "COLON", "DOT", "COMMA", "JSON_OPEN", "QUOTE", "ARRAY_LEFT", - "ARRAY_RIGHT", "COMMENT", "WHITESPACE", "NL", "LEFT_PARENTHESIS", - "RIGHT_PARENTHESIS", "LESS_THAN", "LESS_THAN_OR_EQUAL", "GREATER_THAN", - "GREATER_THAN_OR_EQUAL", "EQUAL", "NOT_EQUAL", "BOOLEAN_AND", - "BOOLEAN_OR", "BOOLEAN_NOT", "STAR", "SLASH", "MINUS", "PLUS", - "INTEGER", "FLOAT", "STRING", "STARTS_WITH_LOWER_C_STR", "STARTS_WITH_UPPER_C_STR", - "JSON_STRING", "JSON_TRUE", "JSON_FALSE", "JSON_COLON", "JSON_QUOTE", - "JSON_COMMENT", "JSON_ARRAY_LEFT", "JSON_ARRAY_RIGHT", "JSON_COMMA", - "NUMBER", "WS", "JSON_OPEN_2", "JSON_CLOSE" ] + symbolicNames = [ + "", + "INDENT", + "DEDENT", + "RULE", + "MODULE", + "IMPORT", + "TRANSPORT", + "MOVE", + "ACTION", + "FROM", + "DO", + "REPEAT", + "ON_DONE", + "TRANSPORT_ORDER_STEP", + "MOVE_ORDER_STEP", + "ACTION_ORDER_STEP", + "CONSTRAINTS", + "PARAMETERS", + "STARTED_BY", + "FINISHED_BY", + "LOCATION", + "EVENT", + "TIME", + "ASSIGNMENT", + "STRUCT", + "TASK", + "IN", + "OUT", + "LOOP", + "WHILE", + "TO", + "PARALLEL", + "CONDITION", + "PASSED", + "FAILED", + "END", + "NUMBER_P", + "STRING_P", + "BOOLEAN_P", + "TRUE", + "FALSE", + "COLON", + "DOT", + "COMMA", + "JSON_OPEN", + "QUOTE", + "ARRAY_LEFT", + "ARRAY_RIGHT", + "COMMENT", + "WHITESPACE", + "NL", + "LEFT_PARENTHESIS", + "RIGHT_PARENTHESIS", + "LESS_THAN", + "LESS_THAN_OR_EQUAL", + "GREATER_THAN", + "GREATER_THAN_OR_EQUAL", + "EQUAL", + "NOT_EQUAL", + "BOOLEAN_AND", + "BOOLEAN_OR", + "BOOLEAN_NOT", + "STAR", + "SLASH", + "MINUS", + "PLUS", + "INTEGER", + "FLOAT", + "STRING", + "STARTS_WITH_LOWER_C_STR", + "STARTS_WITH_UPPER_C_STR", + "JSON_STRING", + "JSON_TRUE", + "JSON_FALSE", + "JSON_COLON", + "JSON_QUOTE", + "JSON_COMMENT", + "JSON_ARRAY_LEFT", + "JSON_ARRAY_RIGHT", + "JSON_COMMA", + "NUMBER", + "WS", + "JSON_OPEN_2", + "JSON_CLOSE", + ] - ruleNames = [ "STRUCT", "TASK", "IN", "OUT", "LOOP", "WHILE", "TO", - "PARALLEL", "CONDITION", "PASSED", "FAILED", "ON_DONE", - 
"END", "NUMBER_P", "STRING_P", "BOOLEAN_P", "TRUE", "FALSE", - "COLON", "DOT", "COMMA", "JSON_OPEN", "QUOTE", "ARRAY_LEFT", - "ARRAY_RIGHT", "COMMENT", "WHITESPACE", "NL", "LEFT_PARENTHESIS", - "RIGHT_PARENTHESIS", "LESS_THAN", "LESS_THAN_OR_EQUAL", - "GREATER_THAN", "GREATER_THAN_OR_EQUAL", "EQUAL", "NOT_EQUAL", - "BOOLEAN_AND", "BOOLEAN_OR", "BOOLEAN_NOT", "STAR", "SLASH", - "MINUS", "PLUS", "INTEGER", "FLOAT", "STRING", "STARTS_WITH_LOWER_C_STR", - "STARTS_WITH_UPPER_C_STR", "JSON_STRING", "JSON_TRUE", - "JSON_FALSE", "JSON_COLON", "JSON_QUOTE", "JSON_COMMENT", - "JSON_ARRAY_LEFT", "JSON_ARRAY_RIGHT", "JSON_COMMA", "NUMBER", - "INT", "EXP", "WS", "JSON_OPEN_2", "JSON_CLOSE" ] + ruleNames = [ + "RULE", + "MODULE", + "IMPORT", + "TRANSPORT", + "MOVE", + "ACTION", + "FROM", + "DO", + "REPEAT", + "ON_DONE", + "TRANSPORT_ORDER_STEP", + "MOVE_ORDER_STEP", + "ACTION_ORDER_STEP", + "CONSTRAINTS", + "PARAMETERS", + "STARTED_BY", + "FINISHED_BY", + "LOCATION", + "EVENT", + "TIME", + "ASSIGNMENT", + "STRUCT", + "TASK", + "IN", + "OUT", + "LOOP", + "WHILE", + "TO", + "PARALLEL", + "CONDITION", + "PASSED", + "FAILED", + "END", + "NUMBER_P", + "STRING_P", + "BOOLEAN_P", + "TRUE", + "FALSE", + "COLON", + "DOT", + "COMMA", + "JSON_OPEN", + "QUOTE", + "ARRAY_LEFT", + "ARRAY_RIGHT", + "COMMENT", + "WHITESPACE", + "NL", + "LEFT_PARENTHESIS", + "RIGHT_PARENTHESIS", + "LESS_THAN", + "LESS_THAN_OR_EQUAL", + "GREATER_THAN", + "GREATER_THAN_OR_EQUAL", + "EQUAL", + "NOT_EQUAL", + "BOOLEAN_AND", + "BOOLEAN_OR", + "BOOLEAN_NOT", + "STAR", + "SLASH", + "MINUS", + "PLUS", + "INTEGER", + "FLOAT", + "STRING", + "STARTS_WITH_LOWER_C_STR", + "STARTS_WITH_UPPER_C_STR", + "JSON_STRING", + "JSON_TRUE", + "JSON_FALSE", + "JSON_COLON", + "JSON_QUOTE", + "JSON_COMMENT", + "JSON_ARRAY_LEFT", + "JSON_ARRAY_RIGHT", + "JSON_COMMA", + "NUMBER", + "INT", + "EXP", + "WS", + "JSON_OPEN_2", + "JSON_CLOSE", + ] grammarFileName = "PFDLLexer.g4" - def __init__(self, input=None, output:TextIO = sys.stdout): + def 
__init__(self, input=None, output: TextIO = sys.stdout): super().__init__(input, output) self.checkVersion("4.9.3") - self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache()) + self._interp = LexerATNSimulator( + self, self.atn, self.decisionsToDFA, PredictionContextCache() + ) self._actions = None self._predicates = None - class PFDLDenter(DenterHelper): def __init__(self, lexer, nl_token, indent_token, dedent_token, ignore_eof): super().__init__(nl_token, indent_token, dedent_token, ignore_eof) @@ -343,7 +646,7 @@ def pull_token(self): def nextToken(self): if not self.denter: - self.denter = self.PFDLDenter(self, self.NL, PFDLLexer.INDENT, PFDLLexer.DEDENT, ignore_eof=False) + self.denter = self.PFDLDenter( + self, self.NL, PFDLLexer.INDENT, PFDLLexer.DEDENT, ignore_eof=False + ) return self.denter.next_token() - - diff --git a/pfdl_scheduler/parser/PFDLParser.py b/pfdl_scheduler/parser/PFDLParser.py index dae5f8d..a584fe3 100644 --- a/pfdl_scheduler/parser/PFDLParser.py +++ b/pfdl_scheduler/parser/PFDLParser.py @@ -1,4 +1,4 @@ -# Generated from PFDLParser.g4 by ANTLR 4.9.3 +# Generated from temp/PFDLParser.g4 by ANTLR 4.9.3 # encoding: utf-8 from antlr4 import * from io import StringIO @@ -11,215 +11,342 @@ def serializedATN(): with StringIO() as buf: - buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3A") - buf.write("\u01b3\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7") + buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3U") + buf.write("\u02b1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7") buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16") buf.write("\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23\t\23") buf.write("\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31") buf.write("\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36") - buf.write("\4\37\t\37\4 \t \4!\t!\4\"\t\"\3\2\3\2\3\2\7\2H\n\2\f") - 
buf.write("\2\16\2K\13\2\3\2\3\2\3\3\3\3\3\3\3\3\3\3\6\3T\n\3\r\3") - buf.write("\16\3U\6\3X\n\3\r\3\16\3Y\3\3\3\3\3\3\3\4\3\4\3\4\3\4") - buf.write("\5\4c\n\4\3\4\6\4f\n\4\r\4\16\4g\3\4\5\4k\n\4\3\4\3\4") - buf.write("\3\4\3\5\3\5\3\5\3\5\6\5t\n\5\r\5\16\5u\6\5x\n\5\r\5\16") - buf.write("\5y\3\5\3\5\3\6\3\6\3\6\3\6\6\6\u0082\n\6\r\6\16\6\u0083") - buf.write("\6\6\u0086\n\6\r\6\16\6\u0087\3\6\3\6\3\7\3\7\3\7\3\7") - buf.write("\3\7\3\7\5\7\u0092\n\7\3\b\3\b\6\b\u0096\n\b\r\b\16\b") - buf.write("\u0097\3\b\3\b\3\b\5\b\u009d\n\b\3\b\5\b\u00a0\n\b\3\b") - buf.write("\5\b\u00a3\n\b\3\t\3\t\6\t\u00a7\n\t\r\t\16\t\u00a8\3") - buf.write("\t\3\t\3\t\5\t\u00ae\n\t\3\t\5\t\u00b1\n\t\3\t\5\t\u00b4") - buf.write("\n\t\3\n\3\n\3\n\3\n\6\n\u00ba\n\n\r\n\16\n\u00bb\3\n") - buf.write("\6\n\u00bf\n\n\r\n\16\n\u00c0\3\n\3\n\3\13\3\13\3\13\3") - buf.write("\13\6\13\u00c9\n\13\r\13\16\13\u00ca\6\13\u00cd\n\13\r") - buf.write("\13\16\13\u00ce\3\13\3\13\3\f\3\f\3\f\6\f\u00d6\n\f\r") - buf.write("\f\16\f\u00d7\3\f\3\f\3\r\3\r\3\r\3\r\3\r\6\r\u00e1\n") - buf.write("\r\r\r\16\r\u00e2\3\r\3\r\3\16\5\16\u00e8\n\16\3\16\3") - buf.write("\16\3\16\3\16\3\16\5\16\u00ef\n\16\3\16\3\16\6\16\u00f3") - buf.write("\n\16\r\16\16\16\u00f4\3\16\3\16\3\17\3\17\3\17\3\17\6") - buf.write("\17\u00fd\n\17\r\17\16\17\u00fe\3\17\3\17\3\17\5\17\u0104") - buf.write("\n\17\3\20\3\20\3\20\6\20\u0109\n\20\r\20\16\20\u010a") - buf.write("\3\20\3\20\3\21\3\21\3\21\6\21\u0112\n\21\r\21\16\21\u0113") - buf.write("\3\21\3\21\3\22\3\22\5\22\u011a\n\22\3\23\3\23\3\23\3") - buf.write("\23\6\23\u0120\n\23\r\23\16\23\u0121\3\23\3\23\3\23\3") - buf.write("\23\7\23\u0128\n\23\f\23\16\23\u012b\13\23\3\23\3\23\7") - buf.write("\23\u012f\n\23\f\23\16\23\u0132\13\23\5\23\u0134\n\23") - buf.write("\3\24\3\24\3\24\3\24\3\25\3\25\5\25\u013c\n\25\3\26\3") - buf.write("\26\3\27\3\27\3\27\3\27\5\27\u0144\n\27\6\27\u0146\n\27") - buf.write("\r\27\16\27\u0147\3\30\3\30\5\30\u014c\n\30\3\30\3\30") - 
buf.write("\3\31\5\31\u0151\n\31\3\31\3\31\3\32\3\32\3\32\3\32\3") - buf.write("\32\5\32\u015a\n\32\3\33\3\33\3\33\3\33\3\33\3\33\3\33") - buf.write("\3\33\3\33\5\33\u0165\n\33\3\33\3\33\3\33\3\33\3\33\3") - buf.write("\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33") - buf.write("\3\33\3\33\3\33\3\33\3\33\3\33\7\33\u017d\n\33\f\33\16") - buf.write("\33\u0180\13\33\3\34\3\34\3\35\3\35\3\36\3\36\3\36\3\36") - buf.write("\7\36\u018a\n\36\f\36\16\36\u018d\13\36\3\36\3\36\3\36") - buf.write("\3\36\3\36\5\36\u0194\n\36\3\37\3\37\3\37\3\37\3 \3 \3") - buf.write("!\3!\3!\3!\3!\3!\5!\u01a2\n!\3\"\3\"\3\"\3\"\7\"\u01a8") - buf.write("\n\"\f\"\16\"\u01ab\13\"\3\"\3\"\3\"\3\"\5\"\u01b1\n\"") - buf.write("\3\"\2\3\64#\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36 \"") - buf.write("$&(*,.\60\62\64\668:<>@B\2\7\4\2\22\24\64\64\4\2\60\60") - buf.write("\63\63\3\2\60\61\3\2#(\4\2\32\32@@\2\u01d8\2I\3\2\2\2") - buf.write("\4N\3\2\2\2\6^\3\2\2\2\bo\3\2\2\2\n}\3\2\2\2\f\u0091\3") - buf.write("\2\2\2\16\u00a2\3\2\2\2\20\u00b3\3\2\2\2\22\u00b5\3\2") - buf.write("\2\2\24\u00c4\3\2\2\2\26\u00d2\3\2\2\2\30\u00db\3\2\2") - buf.write("\2\32\u00e7\3\2\2\2\34\u00f8\3\2\2\2\36\u0105\3\2\2\2") - buf.write(" \u010e\3\2\2\2\"\u0119\3\2\2\2$\u0133\3\2\2\2&\u0135") - buf.write("\3\2\2\2(\u0139\3\2\2\2*\u013d\3\2\2\2,\u013f\3\2\2\2") - buf.write(".\u0149\3\2\2\2\60\u0150\3\2\2\2\62\u0159\3\2\2\2\64\u0164") - buf.write("\3\2\2\2\66\u0181\3\2\2\28\u0183\3\2\2\2:\u0193\3\2\2") - buf.write("\2<\u0195\3\2\2\2>\u0199\3\2\2\2@\u01a1\3\2\2\2B\u01b0") - buf.write("\3\2\2\2DH\7 \2\2EH\5\4\3\2FH\5\6\4\2GD\3\2\2\2GE\3\2") - buf.write("\2\2GF\3\2\2\2HK\3\2\2\2IG\3\2\2\2IJ\3\2\2\2JL\3\2\2\2") - buf.write("KI\3\2\2\2LM\7\2\2\3M\3\3\2\2\2NO\7\5\2\2OP\7\64\2\2P") - buf.write("W\7\3\2\2QS\5&\24\2RT\7 \2\2SR\3\2\2\2TU\3\2\2\2US\3\2") - buf.write("\2\2UV\3\2\2\2VX\3\2\2\2WQ\3\2\2\2XY\3\2\2\2YW\3\2\2\2") - buf.write("YZ\3\2\2\2Z[\3\2\2\2[\\\7\4\2\2\\]\7\21\2\2]\5\3\2\2\2") - 
buf.write("^_\7\6\2\2_`\7\63\2\2`b\7\3\2\2ac\5\b\5\2ba\3\2\2\2bc") - buf.write("\3\2\2\2ce\3\2\2\2df\5\f\7\2ed\3\2\2\2fg\3\2\2\2ge\3\2") - buf.write("\2\2gh\3\2\2\2hj\3\2\2\2ik\5\n\6\2ji\3\2\2\2jk\3\2\2\2") - buf.write("kl\3\2\2\2lm\7\4\2\2mn\7\21\2\2n\7\3\2\2\2op\7\7\2\2p") - buf.write("w\7\3\2\2qs\5&\24\2rt\7 \2\2sr\3\2\2\2tu\3\2\2\2us\3\2") - buf.write("\2\2uv\3\2\2\2vx\3\2\2\2wq\3\2\2\2xy\3\2\2\2yw\3\2\2\2") - buf.write("yz\3\2\2\2z{\3\2\2\2{|\7\4\2\2|\t\3\2\2\2}~\7\b\2\2~\u0085") - buf.write("\7\3\2\2\177\u0081\7\63\2\2\u0080\u0082\7 \2\2\u0081\u0080") - buf.write("\3\2\2\2\u0082\u0083\3\2\2\2\u0083\u0081\3\2\2\2\u0083") - buf.write("\u0084\3\2\2\2\u0084\u0086\3\2\2\2\u0085\177\3\2\2\2\u0086") - buf.write("\u0087\3\2\2\2\u0087\u0085\3\2\2\2\u0087\u0088\3\2\2\2") - buf.write("\u0088\u0089\3\2\2\2\u0089\u008a\7\4\2\2\u008a\13\3\2") - buf.write("\2\2\u008b\u0092\5\16\b\2\u008c\u0092\5\20\t\2\u008d\u0092") - buf.write("\5\26\f\2\u008e\u0092\5\30\r\2\u008f\u0092\5\32\16\2\u0090") - buf.write("\u0092\5\34\17\2\u0091\u008b\3\2\2\2\u0091\u008c\3\2\2") - buf.write("\2\u0091\u008d\3\2\2\2\u0091\u008e\3\2\2\2\u0091\u008f") - buf.write("\3\2\2\2\u0091\u0090\3\2\2\2\u0092\r\3\2\2\2\u0093\u0095") - buf.write("\7\64\2\2\u0094\u0096\7 \2\2\u0095\u0094\3\2\2\2\u0096") - buf.write("\u0097\3\2\2\2\u0097\u0095\3\2\2\2\u0097\u0098\3\2\2\2") - buf.write("\u0098\u00a3\3\2\2\2\u0099\u009a\7\64\2\2\u009a\u009c") - buf.write("\7\3\2\2\u009b\u009d\5\22\n\2\u009c\u009b\3\2\2\2\u009c") - buf.write("\u009d\3\2\2\2\u009d\u009f\3\2\2\2\u009e\u00a0\5\24\13") - buf.write("\2\u009f\u009e\3\2\2\2\u009f\u00a0\3\2\2\2\u00a0\u00a1") - buf.write("\3\2\2\2\u00a1\u00a3\7\4\2\2\u00a2\u0093\3\2\2\2\u00a2") - buf.write("\u0099\3\2\2\2\u00a3\17\3\2\2\2\u00a4\u00a6\7\63\2\2\u00a5") - buf.write("\u00a7\7 \2\2\u00a6\u00a5\3\2\2\2\u00a7\u00a8\3\2\2\2") - buf.write("\u00a8\u00a6\3\2\2\2\u00a8\u00a9\3\2\2\2\u00a9\u00b4\3") - buf.write("\2\2\2\u00aa\u00ab\7\63\2\2\u00ab\u00ad\7\3\2\2\u00ac") - 
buf.write("\u00ae\5\22\n\2\u00ad\u00ac\3\2\2\2\u00ad\u00ae\3\2\2") - buf.write("\2\u00ae\u00b0\3\2\2\2\u00af\u00b1\5\24\13\2\u00b0\u00af") - buf.write("\3\2\2\2\u00b0\u00b1\3\2\2\2\u00b1\u00b2\3\2\2\2\u00b2") - buf.write("\u00b4\7\4\2\2\u00b3\u00a4\3\2\2\2\u00b3\u00aa\3\2\2\2") - buf.write("\u00b4\21\3\2\2\2\u00b5\u00b6\7\7\2\2\u00b6\u00be\7\3") - buf.write("\2\2\u00b7\u00b9\5\"\22\2\u00b8\u00ba\7 \2\2\u00b9\u00b8") - buf.write("\3\2\2\2\u00ba\u00bb\3\2\2\2\u00bb\u00b9\3\2\2\2\u00bb") - buf.write("\u00bc\3\2\2\2\u00bc\u00bf\3\2\2\2\u00bd\u00bf\5$\23\2") - buf.write("\u00be\u00b7\3\2\2\2\u00be\u00bd\3\2\2\2\u00bf\u00c0\3") - buf.write("\2\2\2\u00c0\u00be\3\2\2\2\u00c0\u00c1\3\2\2\2\u00c1\u00c2") - buf.write("\3\2\2\2\u00c2\u00c3\7\4\2\2\u00c3\23\3\2\2\2\u00c4\u00c5") - buf.write("\7\b\2\2\u00c5\u00cc\7\3\2\2\u00c6\u00c8\5&\24\2\u00c7") - buf.write("\u00c9\7 \2\2\u00c8\u00c7\3\2\2\2\u00c9\u00ca\3\2\2\2") - buf.write("\u00ca\u00c8\3\2\2\2\u00ca\u00cb\3\2\2\2\u00cb\u00cd\3") - buf.write("\2\2\2\u00cc\u00c6\3\2\2\2\u00cd\u00ce\3\2\2\2\u00ce\u00cc") - buf.write("\3\2\2\2\u00ce\u00cf\3\2\2\2\u00cf\u00d0\3\2\2\2\u00d0") - buf.write("\u00d1\7\4\2\2\u00d1\25\3\2\2\2\u00d2\u00d3\7\f\2\2\u00d3") - buf.write("\u00d5\7\3\2\2\u00d4\u00d6\5\20\t\2\u00d5\u00d4\3\2\2") - buf.write("\2\u00d6\u00d7\3\2\2\2\u00d7\u00d5\3\2\2\2\u00d7\u00d8") - buf.write("\3\2\2\2\u00d8\u00d9\3\2\2\2\u00d9\u00da\7\4\2\2\u00da") - buf.write("\27\3\2\2\2\u00db\u00dc\7\t\2\2\u00dc\u00dd\7\n\2\2\u00dd") - buf.write("\u00de\5\64\33\2\u00de\u00e0\7\3\2\2\u00df\u00e1\5\f\7") - buf.write("\2\u00e0\u00df\3\2\2\2\u00e1\u00e2\3\2\2\2\u00e2\u00e0") - buf.write("\3\2\2\2\u00e2\u00e3\3\2\2\2\u00e3\u00e4\3\2\2\2\u00e4") - buf.write("\u00e5\7\4\2\2\u00e5\31\3\2\2\2\u00e6\u00e8\7\f\2\2\u00e7") - buf.write("\u00e6\3\2\2\2\u00e7\u00e8\3\2\2\2\u00e8\u00e9\3\2\2\2") - buf.write("\u00e9\u00ea\7\t\2\2\u00ea\u00eb\7\63\2\2\u00eb\u00ee") - buf.write("\7\13\2\2\u00ec\u00ef\5,\27\2\u00ed\u00ef\7\60\2\2\u00ee") - 
buf.write("\u00ec\3\2\2\2\u00ee\u00ed\3\2\2\2\u00ef\u00f0\3\2\2\2") - buf.write("\u00f0\u00f2\7\3\2\2\u00f1\u00f3\5\f\7\2\u00f2\u00f1\3") - buf.write("\2\2\2\u00f3\u00f4\3\2\2\2\u00f4\u00f2\3\2\2\2\u00f4\u00f5") - buf.write("\3\2\2\2\u00f5\u00f6\3\2\2\2\u00f6\u00f7\7\4\2\2\u00f7") - buf.write("\33\3\2\2\2\u00f8\u00f9\7\r\2\2\u00f9\u00fa\7\3\2\2\u00fa") - buf.write("\u00fc\5\64\33\2\u00fb\u00fd\7 \2\2\u00fc\u00fb\3\2\2") - buf.write("\2\u00fd\u00fe\3\2\2\2\u00fe\u00fc\3\2\2\2\u00fe\u00ff") - buf.write("\3\2\2\2\u00ff\u0100\3\2\2\2\u0100\u0101\7\4\2\2\u0101") - buf.write("\u0103\5\36\20\2\u0102\u0104\5 \21\2\u0103\u0102\3\2\2") - buf.write("\2\u0103\u0104\3\2\2\2\u0104\35\3\2\2\2\u0105\u0106\7") - buf.write("\16\2\2\u0106\u0108\7\3\2\2\u0107\u0109\5\f\7\2\u0108") - buf.write("\u0107\3\2\2\2\u0109\u010a\3\2\2\2\u010a\u0108\3\2\2\2") - buf.write("\u010a\u010b\3\2\2\2\u010b\u010c\3\2\2\2\u010c\u010d\7") - buf.write("\4\2\2\u010d\37\3\2\2\2\u010e\u010f\7\17\2\2\u010f\u0111") - buf.write("\7\3\2\2\u0110\u0112\5\f\7\2\u0111\u0110\3\2\2\2\u0112") - buf.write("\u0113\3\2\2\2\u0113\u0111\3\2\2\2\u0113\u0114\3\2\2\2") - buf.write("\u0114\u0115\3\2\2\2\u0115\u0116\7\4\2\2\u0116!\3\2\2") - buf.write("\2\u0117\u011a\7\63\2\2\u0118\u011a\5,\27\2\u0119\u0117") - buf.write("\3\2\2\2\u0119\u0118\3\2\2\2\u011a#\3\2\2\2\u011b\u011c") - buf.write("\7\64\2\2\u011c\u011d\7\3\2\2\u011d\u011f\5:\36\2\u011e") - buf.write("\u0120\7 \2\2\u011f\u011e\3\2\2\2\u0120\u0121\3\2\2\2") - buf.write("\u0121\u011f\3\2\2\2\u0121\u0122\3\2\2\2\u0122\u0123\3") - buf.write("\2\2\2\u0123\u0124\7\4\2\2\u0124\u0134\3\2\2\2\u0125\u0129") - buf.write("\7\64\2\2\u0126\u0128\7 \2\2\u0127\u0126\3\2\2\2\u0128") - buf.write("\u012b\3\2\2\2\u0129\u0127\3\2\2\2\u0129\u012a\3\2\2\2") - buf.write("\u012a\u012c\3\2\2\2\u012b\u0129\3\2\2\2\u012c\u0130\5") - buf.write(":\36\2\u012d\u012f\7 \2\2\u012e\u012d\3\2\2\2\u012f\u0132") - buf.write("\3\2\2\2\u0130\u012e\3\2\2\2\u0130\u0131\3\2\2\2\u0131") - 
buf.write("\u0134\3\2\2\2\u0132\u0130\3\2\2\2\u0133\u011b\3\2\2\2") - buf.write("\u0133\u0125\3\2\2\2\u0134%\3\2\2\2\u0135\u0136\7\63\2") - buf.write("\2\u0136\u0137\7\27\2\2\u0137\u0138\5(\25\2\u0138\'\3") - buf.write("\2\2\2\u0139\u013b\5*\26\2\u013a\u013c\5.\30\2\u013b\u013a") - buf.write("\3\2\2\2\u013b\u013c\3\2\2\2\u013c)\3\2\2\2\u013d\u013e") - buf.write("\t\2\2\2\u013e+\3\2\2\2\u013f\u0145\7\63\2\2\u0140\u0141") - buf.write("\7\30\2\2\u0141\u0143\7\63\2\2\u0142\u0144\5.\30\2\u0143") - buf.write("\u0142\3\2\2\2\u0143\u0144\3\2\2\2\u0144\u0146\3\2\2\2") - buf.write("\u0145\u0140\3\2\2\2\u0146\u0147\3\2\2\2\u0147\u0145\3") - buf.write("\2\2\2\u0147\u0148\3\2\2\2\u0148-\3\2\2\2\u0149\u014b") - buf.write("\7\34\2\2\u014a\u014c\t\3\2\2\u014b\u014a\3\2\2\2\u014b") - buf.write("\u014c\3\2\2\2\u014c\u014d\3\2\2\2\u014d\u014e\7\35\2") - buf.write("\2\u014e/\3\2\2\2\u014f\u0151\7.\2\2\u0150\u014f\3\2\2") - buf.write("\2\u0150\u0151\3\2\2\2\u0151\u0152\3\2\2\2\u0152\u0153") - buf.write("\t\4\2\2\u0153\61\3\2\2\2\u0154\u015a\7\25\2\2\u0155\u015a") - buf.write("\7\26\2\2\u0156\u015a\5\60\31\2\u0157\u015a\7\62\2\2\u0158") - buf.write("\u015a\5,\27\2\u0159\u0154\3\2\2\2\u0159\u0155\3\2\2\2") - buf.write("\u0159\u0156\3\2\2\2\u0159\u0157\3\2\2\2\u0159\u0158\3") - buf.write("\2\2\2\u015a\63\3\2\2\2\u015b\u015c\b\33\1\2\u015c\u015d") - buf.write("\7!\2\2\u015d\u015e\5\64\33\2\u015e\u015f\7\"\2\2\u015f") - buf.write("\u0165\3\2\2\2\u0160\u0161\58\35\2\u0161\u0162\5\64\33") - buf.write("\6\u0162\u0165\3\2\2\2\u0163\u0165\5\62\32\2\u0164\u015b") - buf.write("\3\2\2\2\u0164\u0160\3\2\2\2\u0164\u0163\3\2\2\2\u0165") - buf.write("\u017e\3\2\2\2\u0166\u0167\f\13\2\2\u0167\u0168\7,\2\2") - buf.write("\u0168\u017d\5\64\33\f\u0169\u016a\f\n\2\2\u016a\u016b") - buf.write("\7-\2\2\u016b\u017d\5\64\33\13\u016c\u016d\f\t\2\2\u016d") - buf.write("\u016e\7.\2\2\u016e\u017d\5\64\33\n\u016f\u0170\f\b\2") - buf.write("\2\u0170\u0171\7/\2\2\u0171\u017d\5\64\33\t\u0172\u0173") - 
buf.write("\f\7\2\2\u0173\u0174\5\66\34\2\u0174\u0175\5\64\33\b\u0175") - buf.write("\u017d\3\2\2\2\u0176\u0177\f\5\2\2\u0177\u0178\7)\2\2") - buf.write("\u0178\u017d\5\64\33\6\u0179\u017a\f\4\2\2\u017a\u017b") - buf.write("\7*\2\2\u017b\u017d\5\64\33\5\u017c\u0166\3\2\2\2\u017c") - buf.write("\u0169\3\2\2\2\u017c\u016c\3\2\2\2\u017c\u016f\3\2\2\2") - buf.write("\u017c\u0172\3\2\2\2\u017c\u0176\3\2\2\2\u017c\u0179\3") - buf.write("\2\2\2\u017d\u0180\3\2\2\2\u017e\u017c\3\2\2\2\u017e\u017f") - buf.write("\3\2\2\2\u017f\65\3\2\2\2\u0180\u017e\3\2\2\2\u0181\u0182") - buf.write("\t\5\2\2\u0182\67\3\2\2\2\u0183\u0184\7+\2\2\u01849\3") - buf.write("\2\2\2\u0185\u0186\5> \2\u0186\u018b\5<\37\2\u0187\u0188") - buf.write("\7=\2\2\u0188\u018a\5<\37\2\u0189\u0187\3\2\2\2\u018a") - buf.write("\u018d\3\2\2\2\u018b\u0189\3\2\2\2\u018b\u018c\3\2\2\2") - buf.write("\u018c\u018e\3\2\2\2\u018d\u018b\3\2\2\2\u018e\u018f\7") - buf.write("A\2\2\u018f\u0194\3\2\2\2\u0190\u0191\5> \2\u0191\u0192") - buf.write("\7A\2\2\u0192\u0194\3\2\2\2\u0193\u0185\3\2\2\2\u0193") - buf.write("\u0190\3\2\2\2\u0194;\3\2\2\2\u0195\u0196\7\65\2\2\u0196") - buf.write("\u0197\78\2\2\u0197\u0198\5@!\2\u0198=\3\2\2\2\u0199\u019a") - buf.write("\t\6\2\2\u019a?\3\2\2\2\u019b\u01a2\7\65\2\2\u019c\u01a2") - buf.write("\7\66\2\2\u019d\u01a2\7\67\2\2\u019e\u01a2\7>\2\2\u019f") - buf.write("\u01a2\5:\36\2\u01a0\u01a2\5B\"\2\u01a1\u019b\3\2\2\2") - buf.write("\u01a1\u019c\3\2\2\2\u01a1\u019d\3\2\2\2\u01a1\u019e\3") - buf.write("\2\2\2\u01a1\u019f\3\2\2\2\u01a1\u01a0\3\2\2\2\u01a2A") - buf.write("\3\2\2\2\u01a3\u01a4\7;\2\2\u01a4\u01a9\5@!\2\u01a5\u01a6") - buf.write("\7=\2\2\u01a6\u01a8\5@!\2\u01a7\u01a5\3\2\2\2\u01a8\u01ab") - buf.write("\3\2\2\2\u01a9\u01a7\3\2\2\2\u01a9\u01aa\3\2\2\2\u01aa") - buf.write("\u01ac\3\2\2\2\u01ab\u01a9\3\2\2\2\u01ac\u01ad\7<\2\2") - buf.write("\u01ad\u01b1\3\2\2\2\u01ae\u01af\7;\2\2\u01af\u01b1\7") - buf.write("<\2\2\u01b0\u01a3\3\2\2\2\u01b0\u01ae\3\2\2\2\u01b1C\3") - 
buf.write("\2\2\2\67GIUYbgjuy\u0083\u0087\u0091\u0097\u009c\u009f") - buf.write("\u00a2\u00a8\u00ad\u00b0\u00b3\u00bb\u00be\u00c0\u00ca") - buf.write("\u00ce\u00d7\u00e2\u00e7\u00ee\u00f4\u00fe\u0103\u010a") - buf.write("\u0113\u0119\u0121\u0129\u0130\u0133\u013b\u0143\u0147") - buf.write("\u014b\u0150\u0159\u0164\u017c\u017e\u018b\u0193\u01a1") - buf.write("\u01a9\u01b0") + buf.write("\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t") + buf.write("&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4,\t,\4-\t-\4.\t.\4") + buf.write("/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64\t\64") + buf.write("\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:\3\2\7") + buf.write("\2v\n\2\f\2\16\2y\13\2\3\2\3\2\3\3\3\3\3\3\3\3\3\3\3\3") + buf.write("\5\3\u0083\n\3\3\4\3\4\3\4\3\4\5\4\u0089\n\4\3\4\3\4\3") + buf.write("\4\6\4\u008e\n\4\r\4\16\4\u008f\6\4\u0092\n\4\r\4\16\4") + buf.write("\u0093\3\4\3\4\3\4\3\5\3\5\5\5\u009b\n\5\3\6\3\6\3\6\3") + buf.write("\6\5\6\u00a1\n\6\3\6\6\6\u00a4\n\6\r\6\16\6\u00a5\3\6") + buf.write("\5\6\u00a9\n\6\3\6\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3\7\6\7") + buf.write("\u00b4\n\7\r\7\16\7\u00b5\3\7\3\7\3\7\3\b\3\b\3\b\3\b") + buf.write("\6\b\u00bf\n\b\r\b\16\b\u00c0\6\b\u00c3\n\b\r\b\16\b\u00c4") + buf.write("\3\b\3\b\3\t\3\t\3\t\3\t\6\t\u00cd\n\t\r\t\16\t\u00ce") + buf.write("\6\t\u00d1\n\t\r\t\16\t\u00d2\3\t\3\t\3\n\3\n\3\n\5\n") + buf.write("\u00da\n\n\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13") + buf.write("\5\13\u00e5\n\13\3\f\3\f\6\f\u00e9\n\f\r\f\16\f\u00ea") + buf.write("\3\f\3\f\3\f\5\f\u00f0\n\f\3\f\5\f\u00f3\n\f\3\f\5\f\u00f6") + buf.write("\n\f\3\r\3\r\6\r\u00fa\n\r\r\r\16\r\u00fb\3\r\3\r\3\r") + buf.write("\5\r\u0101\n\r\3\r\5\r\u0104\n\r\3\r\5\r\u0107\n\r\3\16") + buf.write("\3\16\3\16\3\16\6\16\u010d\n\16\r\16\16\16\u010e\3\16") + buf.write("\6\16\u0112\n\16\r\16\16\16\u0113\3\16\3\16\3\17\3\17") + buf.write("\3\17\3\17\6\17\u011c\n\17\r\17\16\17\u011d\6\17\u0120") + buf.write("\n\17\r\17\16\17\u0121\3\17\3\17\3\20\3\20\3\20\6\20\u0129") + 
buf.write("\n\20\r\20\16\20\u012a\3\20\3\20\3\21\3\21\3\21\3\21\3") + buf.write("\21\6\21\u0134\n\21\r\21\16\21\u0135\3\21\3\21\3\22\5") + buf.write("\22\u013b\n\22\3\22\3\22\3\22\3\22\3\22\5\22\u0142\n\22") + buf.write("\3\22\3\22\6\22\u0146\n\22\r\22\16\22\u0147\3\22\3\22") + buf.write("\3\23\3\23\3\23\3\23\6\23\u0150\n\23\r\23\16\23\u0151") + buf.write("\3\23\3\23\3\23\5\23\u0157\n\23\3\24\3\24\3\24\6\24\u015c") + buf.write("\n\24\r\24\16\24\u015d\3\24\3\24\3\25\3\25\3\25\6\25\u0165") + buf.write("\n\25\r\25\16\25\u0166\3\25\3\25\3\26\3\26\5\26\u016d") + buf.write("\n\26\3\27\3\27\3\27\3\27\6\27\u0173\n\27\r\27\16\27\u0174") + buf.write("\3\27\3\27\3\27\3\27\7\27\u017b\n\27\f\27\16\27\u017e") + buf.write("\13\27\3\27\3\27\7\27\u0182\n\27\f\27\16\27\u0185\13\27") + buf.write("\5\27\u0187\n\27\3\30\3\30\3\30\3\30\3\31\3\31\5\31\u018f") + buf.write("\n\31\3\32\3\32\3\33\3\33\3\33\3\33\5\33\u0197\n\33\6") + buf.write("\33\u0199\n\33\r\33\16\33\u019a\3\33\3\33\3\33\3\33\5") + buf.write("\33\u01a1\n\33\7\33\u01a3\n\33\f\33\16\33\u01a6\13\33") + buf.write("\5\33\u01a8\n\33\3\34\3\34\3\34\3\34\5\34\u01ae\n\34\3") + buf.write("\35\3\35\5\35\u01b2\n\35\3\35\3\35\3\36\5\36\u01b7\n\36") + buf.write("\3\36\3\36\3\37\3\37\3\37\3\37\3\37\5\37\u01c0\n\37\3") + buf.write(" \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \5 \u01cd\n \3 \3 \3 \3") + buf.write(" \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \3") + buf.write(" \7 \u01e5\n \f \16 \u01e8\13 \3!\3!\3\"\3\"\3#\3#\3#") + buf.write("\3#\7#\u01f2\n#\f#\16#\u01f5\13#\3#\3#\3#\3#\3#\5#\u01fc") + buf.write("\n#\3$\3$\3$\3$\3%\3%\3&\3&\3&\3&\3&\3&\5&\u020a\n&\3") + buf.write("\'\3\'\3\'\3\'\7\'\u0210\n\'\f\'\16\'\u0213\13\'\3\'\3") + buf.write("\'\3\'\3\'\5\'\u0219\n\'\3(\3(\3(\3(\3(\3(\3(\3)\3)\3") + buf.write(")\7)\u0225\n)\f)\16)\u0228\13)\3)\3)\3*\3*\3*\3*\3*\3") + buf.write("*\3+\3+\3+\3+\3+\3+\3,\3,\3,\5,\u023b\n,\3,\3,\3-\3-\3") + buf.write("-\3-\6-\u0243\n-\r-\16-\u0244\3-\3-\3-\3.\3.\3.\5.\u024d") + 
buf.write("\n.\3/\3/\3/\3/\6/\u0253\n/\r/\16/\u0254\3/\3/\3/\3\60") + buf.write("\3\60\3\60\5\60\u025d\n\60\3\61\3\61\3\61\5\61\u0262\n") + buf.write("\61\3\62\3\62\3\62\3\62\6\62\u0268\n\62\r\62\16\62\u0269") + buf.write("\3\62\3\62\3\62\3\63\3\63\3\63\3\63\5\63\u0273\n\63\3") + buf.write("\64\3\64\3\64\3\64\3\65\3\65\3\65\5\65\u027c\n\65\3\65") + buf.write("\3\65\3\66\3\66\3\66\3\66\3\66\3\66\3\66\3\66\5\66\u0288") + buf.write("\n\66\3\67\3\67\3\67\3\67\38\38\38\38\38\38\68\u0294\n") + buf.write("8\r8\168\u0295\38\38\38\39\39\39\39\39\79\u02a0\n9\f9") + buf.write("\169\u02a3\139\59\u02a5\n9\39\39\3:\3:\5:\u02ab\n:\3:") + buf.write("\3:\5:\u02af\n:\3:\2\3>;\2\4\6\b\n\f\16\20\22\24\26\30") + buf.write("\32\34\36 \"$&(*,.\60\62\64\668:<>@BDFHJLNPRTVXZ\\^`b") + buf.write("dfhjlnpr\2\b\4\2\26\30HH\5\2\26\30&(HH\4\2DDGG\3\2DE\3") + buf.write("\2\67<\4\2..TT\2\u02e4\2w\3\2\2\2\4\u0082\3\2\2\2\6\u0084") + buf.write("\3\2\2\2\b\u009a\3\2\2\2\n\u009c\3\2\2\2\f\u00ad\3\2\2") + buf.write("\2\16\u00ba\3\2\2\2\20\u00c8\3\2\2\2\22\u00d9\3\2\2\2") + buf.write("\24\u00e4\3\2\2\2\26\u00f5\3\2\2\2\30\u0106\3\2\2\2\32") + buf.write("\u0108\3\2\2\2\34\u0117\3\2\2\2\36\u0125\3\2\2\2 \u012e") + buf.write("\3\2\2\2\"\u013a\3\2\2\2$\u014b\3\2\2\2&\u0158\3\2\2\2") + buf.write("(\u0161\3\2\2\2*\u016c\3\2\2\2,\u0186\3\2\2\2.\u0188\3") + buf.write("\2\2\2\60\u018c\3\2\2\2\62\u0190\3\2\2\2\64\u01a7\3\2") + buf.write("\2\2\66\u01a9\3\2\2\28\u01af\3\2\2\2:\u01b6\3\2\2\2<\u01bf") + buf.write("\3\2\2\2>\u01cc\3\2\2\2@\u01e9\3\2\2\2B\u01eb\3\2\2\2") + buf.write("D\u01fb\3\2\2\2F\u01fd\3\2\2\2H\u0201\3\2\2\2J\u0209\3") + buf.write("\2\2\2L\u0218\3\2\2\2N\u021a\3\2\2\2P\u0221\3\2\2\2R\u022b") + buf.write("\3\2\2\2T\u0231\3\2\2\2V\u0237\3\2\2\2X\u023e\3\2\2\2") + buf.write("Z\u024c\3\2\2\2\\\u024e\3\2\2\2^\u025c\3\2\2\2`\u0261") + buf.write("\3\2\2\2b\u0263\3\2\2\2d\u0272\3\2\2\2f\u0274\3\2\2\2") + buf.write("h\u0278\3\2\2\2j\u0287\3\2\2\2l\u0289\3\2\2\2n\u028d\3") + 
buf.write("\2\2\2p\u029a\3\2\2\2r\u02aa\3\2\2\2tv\5\4\3\2ut\3\2\2") + buf.write("\2vy\3\2\2\2wu\3\2\2\2wx\3\2\2\2xz\3\2\2\2yw\3\2\2\2z") + buf.write("{\7\2\2\3{\3\3\2\2\2|\u0083\7\64\2\2}\u0083\5\6\4\2~\u0083") + buf.write("\5\n\6\2\177\u0083\5\f\7\2\u0080\u0083\5n8\2\u0081\u0083") + buf.write("\5`\61\2\u0082|\3\2\2\2\u0082}\3\2\2\2\u0082~\3\2\2\2") + buf.write("\u0082\177\3\2\2\2\u0082\u0080\3\2\2\2\u0082\u0081\3\2") + buf.write("\2\2\u0083\5\3\2\2\2\u0084\u0085\7\32\2\2\u0085\u0088") + buf.write("\7H\2\2\u0086\u0087\7+\2\2\u0087\u0089\5\b\5\2\u0088\u0086") + buf.write("\3\2\2\2\u0088\u0089\3\2\2\2\u0089\u008a\3\2\2\2\u008a") + buf.write("\u0091\7\3\2\2\u008b\u008d\5.\30\2\u008c\u008e\7\64\2") + buf.write("\2\u008d\u008c\3\2\2\2\u008e\u008f\3\2\2\2\u008f\u008d") + buf.write("\3\2\2\2\u008f\u0090\3\2\2\2\u0090\u0092\3\2\2\2\u0091") + buf.write("\u008b\3\2\2\2\u0092\u0093\3\2\2\2\u0093\u0091\3\2\2\2") + buf.write("\u0093\u0094\3\2\2\2\u0094\u0095\3\2\2\2\u0095\u0096\7") + buf.write("\4\2\2\u0096\u0097\7%\2\2\u0097\7\3\2\2\2\u0098\u009b") + buf.write("\7H\2\2\u0099\u009b\t\2\2\2\u009a\u0098\3\2\2\2\u009a") + buf.write("\u0099\3\2\2\2\u009b\t\3\2\2\2\u009c\u009d\7\33\2\2\u009d") + buf.write("\u009e\7G\2\2\u009e\u00a0\7\3\2\2\u009f\u00a1\5\16\b\2") + buf.write("\u00a0\u009f\3\2\2\2\u00a0\u00a1\3\2\2\2\u00a1\u00a3\3") + buf.write("\2\2\2\u00a2\u00a4\5\22\n\2\u00a3\u00a2\3\2\2\2\u00a4") + buf.write("\u00a5\3\2\2\2\u00a5\u00a3\3\2\2\2\u00a5\u00a6\3\2\2\2") + buf.write("\u00a6\u00a8\3\2\2\2\u00a7\u00a9\5\20\t\2\u00a8\u00a7") + buf.write("\3\2\2\2\u00a8\u00a9\3\2\2\2\u00a9\u00aa\3\2\2\2\u00aa") + buf.write("\u00ab\7\4\2\2\u00ab\u00ac\7%\2\2\u00ac\13\3\2\2\2\u00ad") + buf.write("\u00ae\5\b\5\2\u00ae\u00af\7G\2\2\u00af\u00b3\7\3\2\2") + buf.write("\u00b0\u00b1\5\66\34\2\u00b1\u00b2\7\64\2\2\u00b2\u00b4") + buf.write("\3\2\2\2\u00b3\u00b0\3\2\2\2\u00b4\u00b5\3\2\2\2\u00b5") + buf.write("\u00b3\3\2\2\2\u00b5\u00b6\3\2\2\2\u00b6\u00b7\3\2\2\2") + 
buf.write("\u00b7\u00b8\7\4\2\2\u00b8\u00b9\7%\2\2\u00b9\r\3\2\2") + buf.write("\2\u00ba\u00bb\7\34\2\2\u00bb\u00c2\7\3\2\2\u00bc\u00be") + buf.write("\5.\30\2\u00bd\u00bf\7\64\2\2\u00be\u00bd\3\2\2\2\u00bf") + buf.write("\u00c0\3\2\2\2\u00c0\u00be\3\2\2\2\u00c0\u00c1\3\2\2\2") + buf.write("\u00c1\u00c3\3\2\2\2\u00c2\u00bc\3\2\2\2\u00c3\u00c4\3") + buf.write("\2\2\2\u00c4\u00c2\3\2\2\2\u00c4\u00c5\3\2\2\2\u00c5\u00c6") + buf.write("\3\2\2\2\u00c6\u00c7\7\4\2\2\u00c7\17\3\2\2\2\u00c8\u00c9") + buf.write("\7\35\2\2\u00c9\u00d0\7\3\2\2\u00ca\u00cc\7G\2\2\u00cb") + buf.write("\u00cd\7\64\2\2\u00cc\u00cb\3\2\2\2\u00cd\u00ce\3\2\2") + buf.write("\2\u00ce\u00cc\3\2\2\2\u00ce\u00cf\3\2\2\2\u00cf\u00d1") + buf.write("\3\2\2\2\u00d0\u00ca\3\2\2\2\u00d1\u00d2\3\2\2\2\u00d2") + buf.write("\u00d0\3\2\2\2\u00d2\u00d3\3\2\2\2\u00d3\u00d4\3\2\2\2") + buf.write("\u00d4\u00d5\7\4\2\2\u00d5\21\3\2\2\2\u00d6\u00da\5\24") + buf.write("\13\2\u00d7\u00da\5j\66\2\u00d8\u00da\5V,\2\u00d9\u00d6") + buf.write("\3\2\2\2\u00d9\u00d7\3\2\2\2\u00d9\u00d8\3\2\2\2\u00da") + buf.write("\23\3\2\2\2\u00db\u00e5\5\26\f\2\u00dc\u00e5\5\30\r\2") + buf.write("\u00dd\u00e5\5\36\20\2\u00de\u00e5\5 \21\2\u00df\u00e5") + buf.write("\5\"\22\2\u00e0\u00e5\5$\23\2\u00e1\u00e5\5N(\2\u00e2") + buf.write("\u00e5\5R*\2\u00e3\u00e5\5T+\2\u00e4\u00db\3\2\2\2\u00e4") + buf.write("\u00dc\3\2\2\2\u00e4\u00dd\3\2\2\2\u00e4\u00de\3\2\2\2") + buf.write("\u00e4\u00df\3\2\2\2\u00e4\u00e0\3\2\2\2\u00e4\u00e1\3") + buf.write("\2\2\2\u00e4\u00e2\3\2\2\2\u00e4\u00e3\3\2\2\2\u00e5\25") + buf.write("\3\2\2\2\u00e6\u00e8\7H\2\2\u00e7\u00e9\7\64\2\2\u00e8") + buf.write("\u00e7\3\2\2\2\u00e9\u00ea\3\2\2\2\u00ea\u00e8\3\2\2\2") + buf.write("\u00ea\u00eb\3\2\2\2\u00eb\u00f6\3\2\2\2\u00ec\u00ed\7") + buf.write("H\2\2\u00ed\u00ef\7\3\2\2\u00ee\u00f0\5\32\16\2\u00ef") + buf.write("\u00ee\3\2\2\2\u00ef\u00f0\3\2\2\2\u00f0\u00f2\3\2\2\2") + buf.write("\u00f1\u00f3\5\34\17\2\u00f2\u00f1\3\2\2\2\u00f2\u00f3") + 
buf.write("\3\2\2\2\u00f3\u00f4\3\2\2\2\u00f4\u00f6\7\4\2\2\u00f5") + buf.write("\u00e6\3\2\2\2\u00f5\u00ec\3\2\2\2\u00f6\27\3\2\2\2\u00f7") + buf.write("\u00f9\7G\2\2\u00f8\u00fa\7\64\2\2\u00f9\u00f8\3\2\2\2") + buf.write("\u00fa\u00fb\3\2\2\2\u00fb\u00f9\3\2\2\2\u00fb\u00fc\3") + buf.write("\2\2\2\u00fc\u0107\3\2\2\2\u00fd\u00fe\7G\2\2\u00fe\u0100") + buf.write("\7\3\2\2\u00ff\u0101\5\32\16\2\u0100\u00ff\3\2\2\2\u0100") + buf.write("\u0101\3\2\2\2\u0101\u0103\3\2\2\2\u0102\u0104\5\34\17") + buf.write("\2\u0103\u0102\3\2\2\2\u0103\u0104\3\2\2\2\u0104\u0105") + buf.write("\3\2\2\2\u0105\u0107\7\4\2\2\u0106\u00f7\3\2\2\2\u0106") + buf.write("\u00fd\3\2\2\2\u0107\31\3\2\2\2\u0108\u0109\7\34\2\2\u0109") + buf.write("\u0111\7\3\2\2\u010a\u010c\5*\26\2\u010b\u010d\7\64\2") + buf.write("\2\u010c\u010b\3\2\2\2\u010d\u010e\3\2\2\2\u010e\u010c") + buf.write("\3\2\2\2\u010e\u010f\3\2\2\2\u010f\u0112\3\2\2\2\u0110") + buf.write("\u0112\5,\27\2\u0111\u010a\3\2\2\2\u0111\u0110\3\2\2\2") + buf.write("\u0112\u0113\3\2\2\2\u0113\u0111\3\2\2\2\u0113\u0114\3") + buf.write("\2\2\2\u0114\u0115\3\2\2\2\u0115\u0116\7\4\2\2\u0116\33") + buf.write("\3\2\2\2\u0117\u0118\7\35\2\2\u0118\u011f\7\3\2\2\u0119") + buf.write("\u011b\5.\30\2\u011a\u011c\7\64\2\2\u011b\u011a\3\2\2") + buf.write("\2\u011c\u011d\3\2\2\2\u011d\u011b\3\2\2\2\u011d\u011e") + buf.write("\3\2\2\2\u011e\u0120\3\2\2\2\u011f\u0119\3\2\2\2\u0120") + buf.write("\u0121\3\2\2\2\u0121\u011f\3\2\2\2\u0121\u0122\3\2\2\2") + buf.write("\u0122\u0123\3\2\2\2\u0123\u0124\7\4\2\2\u0124\35\3\2") + buf.write("\2\2\u0125\u0126\7!\2\2\u0126\u0128\7\3\2\2\u0127\u0129") + buf.write("\5\30\r\2\u0128\u0127\3\2\2\2\u0129\u012a\3\2\2\2\u012a") + buf.write("\u0128\3\2\2\2\u012a\u012b\3\2\2\2\u012b\u012c\3\2\2\2") + buf.write("\u012c\u012d\7\4\2\2\u012d\37\3\2\2\2\u012e\u012f\7\36") + buf.write("\2\2\u012f\u0130\7\37\2\2\u0130\u0131\5> \2\u0131\u0133") + buf.write("\7\3\2\2\u0132\u0134\5\24\13\2\u0133\u0132\3\2\2\2\u0134") + 
buf.write("\u0135\3\2\2\2\u0135\u0133\3\2\2\2\u0135\u0136\3\2\2\2") + buf.write("\u0136\u0137\3\2\2\2\u0137\u0138\7\4\2\2\u0138!\3\2\2") + buf.write("\2\u0139\u013b\7!\2\2\u013a\u0139\3\2\2\2\u013a\u013b") + buf.write("\3\2\2\2\u013b\u013c\3\2\2\2\u013c\u013d\7\36\2\2\u013d") + buf.write("\u013e\7G\2\2\u013e\u0141\7 \2\2\u013f\u0142\5\64\33\2") + buf.write("\u0140\u0142\7D\2\2\u0141\u013f\3\2\2\2\u0141\u0140\3") + buf.write("\2\2\2\u0142\u0143\3\2\2\2\u0143\u0145\7\3\2\2\u0144\u0146") + buf.write("\5\24\13\2\u0145\u0144\3\2\2\2\u0146\u0147\3\2\2\2\u0147") + buf.write("\u0145\3\2\2\2\u0147\u0148\3\2\2\2\u0148\u0149\3\2\2\2") + buf.write("\u0149\u014a\7\4\2\2\u014a#\3\2\2\2\u014b\u014c\7\"\2") + buf.write("\2\u014c\u014d\7\3\2\2\u014d\u014f\5> \2\u014e\u0150\7") + buf.write("\64\2\2\u014f\u014e\3\2\2\2\u0150\u0151\3\2\2\2\u0151") + buf.write("\u014f\3\2\2\2\u0151\u0152\3\2\2\2\u0152\u0153\3\2\2\2") + buf.write("\u0153\u0154\7\4\2\2\u0154\u0156\5&\24\2\u0155\u0157\5") + buf.write("(\25\2\u0156\u0155\3\2\2\2\u0156\u0157\3\2\2\2\u0157%") + buf.write("\3\2\2\2\u0158\u0159\7#\2\2\u0159\u015b\7\3\2\2\u015a") + buf.write("\u015c\5\24\13\2\u015b\u015a\3\2\2\2\u015c\u015d\3\2\2") + buf.write("\2\u015d\u015b\3\2\2\2\u015d\u015e\3\2\2\2\u015e\u015f") + buf.write("\3\2\2\2\u015f\u0160\7\4\2\2\u0160\'\3\2\2\2\u0161\u0162") + buf.write("\7$\2\2\u0162\u0164\7\3\2\2\u0163\u0165\5\24\13\2\u0164") + buf.write("\u0163\3\2\2\2\u0165\u0166\3\2\2\2\u0166\u0164\3\2\2\2") + buf.write("\u0166\u0167\3\2\2\2\u0167\u0168\3\2\2\2\u0168\u0169\7") + buf.write("\4\2\2\u0169)\3\2\2\2\u016a\u016d\7G\2\2\u016b\u016d\5") + buf.write("\64\33\2\u016c\u016a\3\2\2\2\u016c\u016b\3\2\2\2\u016d") + buf.write("+\3\2\2\2\u016e\u016f\7H\2\2\u016f\u0170\7\3\2\2\u0170") + buf.write("\u0172\5D#\2\u0171\u0173\7\64\2\2\u0172\u0171\3\2\2\2") + buf.write("\u0173\u0174\3\2\2\2\u0174\u0172\3\2\2\2\u0174\u0175\3") + buf.write("\2\2\2\u0175\u0176\3\2\2\2\u0176\u0177\7\4\2\2\u0177\u0187") + 
buf.write("\3\2\2\2\u0178\u017c\7H\2\2\u0179\u017b\7\64\2\2\u017a") + buf.write("\u0179\3\2\2\2\u017b\u017e\3\2\2\2\u017c\u017a\3\2\2\2") + buf.write("\u017c\u017d\3\2\2\2\u017d\u017f\3\2\2\2\u017e\u017c\3") + buf.write("\2\2\2\u017f\u0183\5D#\2\u0180\u0182\7\64\2\2\u0181\u0180") + buf.write("\3\2\2\2\u0182\u0185\3\2\2\2\u0183\u0181\3\2\2\2\u0183") + buf.write("\u0184\3\2\2\2\u0184\u0187\3\2\2\2\u0185\u0183\3\2\2\2") + buf.write("\u0186\u016e\3\2\2\2\u0186\u0178\3\2\2\2\u0187-\3\2\2") + buf.write("\2\u0188\u0189\7G\2\2\u0189\u018a\7+\2\2\u018a\u018b\5") + buf.write("\60\31\2\u018b/\3\2\2\2\u018c\u018e\5\62\32\2\u018d\u018f") + buf.write("\58\35\2\u018e\u018d\3\2\2\2\u018e\u018f\3\2\2\2\u018f") + buf.write("\61\3\2\2\2\u0190\u0191\t\3\2\2\u0191\63\3\2\2\2\u0192") + buf.write("\u0198\7G\2\2\u0193\u0194\7,\2\2\u0194\u0196\7G\2\2\u0195") + buf.write("\u0197\58\35\2\u0196\u0195\3\2\2\2\u0196\u0197\3\2\2\2") + buf.write("\u0197\u0199\3\2\2\2\u0198\u0193\3\2\2\2\u0199\u019a\3") + buf.write("\2\2\2\u019a\u0198\3\2\2\2\u019a\u019b\3\2\2\2\u019b\u01a8") + buf.write("\3\2\2\2\u019c\u01a4\7G\2\2\u019d\u019e\7,\2\2\u019e\u01a0") + buf.write("\7G\2\2\u019f\u01a1\58\35\2\u01a0\u019f\3\2\2\2\u01a0") + buf.write("\u01a1\3\2\2\2\u01a1\u01a3\3\2\2\2\u01a2\u019d\3\2\2\2") + buf.write("\u01a3\u01a6\3\2\2\2\u01a4\u01a2\3\2\2\2\u01a4\u01a5\3") + buf.write("\2\2\2\u01a5\u01a8\3\2\2\2\u01a6\u01a4\3\2\2\2\u01a7\u0192") + buf.write("\3\2\2\2\u01a7\u019c\3\2\2\2\u01a8\65\3\2\2\2\u01a9\u01aa") + buf.write("\7G\2\2\u01aa\u01ad\7+\2\2\u01ab\u01ae\5<\37\2\u01ac\u01ae") + buf.write("\5D#\2\u01ad\u01ab\3\2\2\2\u01ad\u01ac\3\2\2\2\u01ae\67") + buf.write("\3\2\2\2\u01af\u01b1\7\60\2\2\u01b0\u01b2\t\4\2\2\u01b1") + buf.write("\u01b0\3\2\2\2\u01b1\u01b2\3\2\2\2\u01b2\u01b3\3\2\2\2") + buf.write("\u01b3\u01b4\7\61\2\2\u01b49\3\2\2\2\u01b5\u01b7\7B\2") + buf.write("\2\u01b6\u01b5\3\2\2\2\u01b6\u01b7\3\2\2\2\u01b7\u01b8") + buf.write("\3\2\2\2\u01b8\u01b9\t\5\2\2\u01b9;\3\2\2\2\u01ba\u01c0") + 
buf.write("\7)\2\2\u01bb\u01c0\7*\2\2\u01bc\u01c0\5:\36\2\u01bd\u01c0") + buf.write("\7F\2\2\u01be\u01c0\5\64\33\2\u01bf\u01ba\3\2\2\2\u01bf") + buf.write("\u01bb\3\2\2\2\u01bf\u01bc\3\2\2\2\u01bf\u01bd\3\2\2\2") + buf.write("\u01bf\u01be\3\2\2\2\u01c0=\3\2\2\2\u01c1\u01c2\b \1\2") + buf.write("\u01c2\u01c3\7\65\2\2\u01c3\u01c4\5> \2\u01c4\u01c5\7") + buf.write("\66\2\2\u01c5\u01cd\3\2\2\2\u01c6\u01c7\5B\"\2\u01c7\u01c8") + buf.write("\5> \b\u01c8\u01cd\3\2\2\2\u01c9\u01cd\5<\37\2\u01ca\u01cd") + buf.write("\5\64\33\2\u01cb\u01cd\5p9\2\u01cc\u01c1\3\2\2\2\u01cc") + buf.write("\u01c6\3\2\2\2\u01cc\u01c9\3\2\2\2\u01cc\u01ca\3\2\2\2") + buf.write("\u01cc\u01cb\3\2\2\2\u01cd\u01e6\3\2\2\2\u01ce\u01cf\f") + buf.write("\r\2\2\u01cf\u01d0\7@\2\2\u01d0\u01e5\5> \16\u01d1\u01d2") + buf.write("\f\f\2\2\u01d2\u01d3\7A\2\2\u01d3\u01e5\5> \r\u01d4\u01d5") + buf.write("\f\13\2\2\u01d5\u01d6\7B\2\2\u01d6\u01e5\5> \f\u01d7\u01d8") + buf.write("\f\n\2\2\u01d8\u01d9\7C\2\2\u01d9\u01e5\5> \13\u01da\u01db") + buf.write("\f\t\2\2\u01db\u01dc\5@!\2\u01dc\u01dd\5> \n\u01dd\u01e5") + buf.write("\3\2\2\2\u01de\u01df\f\7\2\2\u01df\u01e0\7=\2\2\u01e0") + buf.write("\u01e5\5> \b\u01e1\u01e2\f\6\2\2\u01e2\u01e3\7>\2\2\u01e3") + buf.write("\u01e5\5> \7\u01e4\u01ce\3\2\2\2\u01e4\u01d1\3\2\2\2\u01e4") + buf.write("\u01d4\3\2\2\2\u01e4\u01d7\3\2\2\2\u01e4\u01da\3\2\2\2") + buf.write("\u01e4\u01de\3\2\2\2\u01e4\u01e1\3\2\2\2\u01e5\u01e8\3") + buf.write("\2\2\2\u01e6\u01e4\3\2\2\2\u01e6\u01e7\3\2\2\2\u01e7?") + buf.write("\3\2\2\2\u01e8\u01e6\3\2\2\2\u01e9\u01ea\t\6\2\2\u01ea") + buf.write("A\3\2\2\2\u01eb\u01ec\7?\2\2\u01ecC\3\2\2\2\u01ed\u01ee") + buf.write("\5H%\2\u01ee\u01f3\5F$\2\u01ef\u01f0\7Q\2\2\u01f0\u01f2") + buf.write("\5F$\2\u01f1\u01ef\3\2\2\2\u01f2\u01f5\3\2\2\2\u01f3\u01f1") + buf.write("\3\2\2\2\u01f3\u01f4\3\2\2\2\u01f4\u01f6\3\2\2\2\u01f5") + buf.write("\u01f3\3\2\2\2\u01f6\u01f7\7U\2\2\u01f7\u01fc\3\2\2\2") + buf.write("\u01f8\u01f9\5H%\2\u01f9\u01fa\7U\2\2\u01fa\u01fc\3\2") + 
buf.write("\2\2\u01fb\u01ed\3\2\2\2\u01fb\u01f8\3\2\2\2\u01fcE\3") + buf.write("\2\2\2\u01fd\u01fe\7I\2\2\u01fe\u01ff\7L\2\2\u01ff\u0200") + buf.write("\5J&\2\u0200G\3\2\2\2\u0201\u0202\t\7\2\2\u0202I\3\2\2") + buf.write("\2\u0203\u020a\7I\2\2\u0204\u020a\7J\2\2\u0205\u020a\7") + buf.write("K\2\2\u0206\u020a\7R\2\2\u0207\u020a\5D#\2\u0208\u020a") + buf.write("\5L\'\2\u0209\u0203\3\2\2\2\u0209\u0204\3\2\2\2\u0209") + buf.write("\u0205\3\2\2\2\u0209\u0206\3\2\2\2\u0209\u0207\3\2\2\2") + buf.write("\u0209\u0208\3\2\2\2\u020aK\3\2\2\2\u020b\u020c\7O\2\2") + buf.write("\u020c\u0211\5J&\2\u020d\u020e\7Q\2\2\u020e\u0210\5J&") + buf.write("\2\u020f\u020d\3\2\2\2\u0210\u0213\3\2\2\2\u0211\u020f") + buf.write("\3\2\2\2\u0211\u0212\3\2\2\2\u0212\u0214\3\2\2\2\u0213") + buf.write("\u0211\3\2\2\2\u0214\u0215\7P\2\2\u0215\u0219\3\2\2\2") + buf.write("\u0216\u0217\7O\2\2\u0217\u0219\7P\2\2\u0218\u020b\3\2") + buf.write("\2\2\u0218\u0216\3\2\2\2\u0219M\3\2\2\2\u021a\u021b\7") + buf.write("\b\2\2\u021b\u021c\7\64\2\2\u021c\u021d\7\13\2\2\u021d") + buf.write("\u021e\5P)\2\u021e\u021f\7 \2\2\u021f\u0220\5P)\2\u0220") + buf.write("O\3\2\2\2\u0221\u0226\7G\2\2\u0222\u0223\7-\2\2\u0223") + buf.write("\u0225\7G\2\2\u0224\u0222\3\2\2\2\u0225\u0228\3\2\2\2") + buf.write("\u0226\u0224\3\2\2\2\u0226\u0227\3\2\2\2\u0227\u0229\3") + buf.write("\2\2\2\u0228\u0226\3\2\2\2\u0229\u022a\7\64\2\2\u022a") + buf.write("Q\3\2\2\2\u022b\u022c\7\t\2\2\u022c\u022d\7\64\2\2\u022d") + buf.write("\u022e\7 \2\2\u022e\u022f\7G\2\2\u022f\u0230\7\64\2\2") + buf.write("\u0230S\3\2\2\2\u0231\u0232\7\n\2\2\u0232\u0233\7\64\2") + buf.write("\2\u0233\u0234\7\f\2\2\u0234\u0235\7G\2\2\u0235\u0236") + buf.write("\7\64\2\2\u0236U\3\2\2\2\u0237\u023a\7\22\2\2\u0238\u023b") + buf.write("\5> \2\u0239\u023b\5D#\2\u023a\u0238\3\2\2\2\u023a\u0239") + buf.write("\3\2\2\2\u023b\u023c\3\2\2\2\u023c\u023d\7\64\2\2\u023d") + buf.write("W\3\2\2\2\u023e\u023f\7\20\2\2\u023f\u0240\7G\2\2\u0240") + 
buf.write("\u0242\7\3\2\2\u0241\u0243\5Z.\2\u0242\u0241\3\2\2\2\u0243") + buf.write("\u0244\3\2\2\2\u0244\u0242\3\2\2\2\u0244\u0245\3\2\2\2") + buf.write("\u0245\u0246\3\2\2\2\u0246\u0247\7\4\2\2\u0247\u0248\7") + buf.write("%\2\2\u0248Y\3\2\2\2\u0249\u024d\5f\64\2\u024a\u024d\5") + buf.write("j\66\2\u024b\u024d\5l\67\2\u024c\u0249\3\2\2\2\u024c\u024a") + buf.write("\3\2\2\2\u024c\u024b\3\2\2\2\u024d[\3\2\2\2\u024e\u024f") + buf.write("\7\21\2\2\u024f\u0250\7G\2\2\u0250\u0252\7\3\2\2\u0251") + buf.write("\u0253\5^\60\2\u0252\u0251\3\2\2\2\u0253\u0254\3\2\2\2") + buf.write("\u0254\u0252\3\2\2\2\u0254\u0255\3\2\2\2\u0255\u0256\3") + buf.write("\2\2\2\u0256\u0257\7\4\2\2\u0257\u0258\7%\2\2\u0258]\3") + buf.write("\2\2\2\u0259\u025d\5h\65\2\u025a\u025d\5j\66\2\u025b\u025d") + buf.write("\5l\67\2\u025c\u0259\3\2\2\2\u025c\u025a\3\2\2\2\u025c") + buf.write("\u025b\3\2\2\2\u025d_\3\2\2\2\u025e\u0262\5b\62\2\u025f") + buf.write("\u0262\5X-\2\u0260\u0262\5\\/\2\u0261\u025e\3\2\2\2\u0261") + buf.write("\u025f\3\2\2\2\u0261\u0260\3\2\2\2\u0262a\3\2\2\2\u0263") + buf.write("\u0264\7\17\2\2\u0264\u0265\7G\2\2\u0265\u0267\7\3\2\2") + buf.write("\u0266\u0268\5d\63\2\u0267\u0266\3\2\2\2\u0268\u0269\3") + buf.write("\2\2\2\u0269\u0267\3\2\2\2\u0269\u026a\3\2\2\2\u026a\u026b") + buf.write("\3\2\2\2\u026b\u026c\7\4\2\2\u026c\u026d\7%\2\2\u026d") + buf.write("c\3\2\2\2\u026e\u0273\5f\64\2\u026f\u0273\5h\65\2\u0270") + buf.write("\u0273\5j\66\2\u0271\u0273\5l\67\2\u0272\u026e\3\2\2\2") + buf.write("\u0272\u026f\3\2\2\2\u0272\u0270\3\2\2\2\u0272\u0271\3") + buf.write("\2\2\2\u0273e\3\2\2\2\u0274\u0275\7\26\2\2\u0275\u0276") + buf.write("\7G\2\2\u0276\u0277\7\64\2\2\u0277g\3\2\2\2\u0278\u027b") + buf.write("\7\23\2\2\u0279\u027c\5<\37\2\u027a\u027c\5D#\2\u027b") + buf.write("\u0279\3\2\2\2\u027b\u027a\3\2\2\2\u027c\u027d\3\2\2\2") + buf.write("\u027d\u027e\7\64\2\2\u027ei\3\2\2\2\u027f\u0280\7\24") + buf.write("\2\2\u0280\u0281\5> \2\u0281\u0282\7\64\2\2\u0282\u0288") + 
buf.write("\3\2\2\2\u0283\u0284\7\25\2\2\u0284\u0285\5> \2\u0285") + buf.write("\u0286\7\64\2\2\u0286\u0288\3\2\2\2\u0287\u027f\3\2\2") + buf.write("\2\u0287\u0283\3\2\2\2\u0288k\3\2\2\2\u0289\u028a\7\16") + buf.write("\2\2\u028a\u028b\7G\2\2\u028b\u028c\7\64\2\2\u028cm\3") + buf.write("\2\2\2\u028d\u028e\7\5\2\2\u028e\u028f\5p9\2\u028f\u0293") + buf.write("\7\3\2\2\u0290\u0291\5> \2\u0291\u0292\7\64\2\2\u0292") + buf.write("\u0294\3\2\2\2\u0293\u0290\3\2\2\2\u0294\u0295\3\2\2\2") + buf.write("\u0295\u0293\3\2\2\2\u0295\u0296\3\2\2\2\u0296\u0297\3") + buf.write("\2\2\2\u0297\u0298\7\4\2\2\u0298\u0299\7%\2\2\u0299o\3") + buf.write("\2\2\2\u029a\u029b\7G\2\2\u029b\u02a4\7\65\2\2\u029c\u02a1") + buf.write("\5r:\2\u029d\u029e\7-\2\2\u029e\u02a0\5r:\2\u029f\u029d") + buf.write("\3\2\2\2\u02a0\u02a3\3\2\2\2\u02a1\u029f\3\2\2\2\u02a1") + buf.write("\u02a2\3\2\2\2\u02a2\u02a5\3\2\2\2\u02a3\u02a1\3\2\2\2") + buf.write("\u02a4\u029c\3\2\2\2\u02a4\u02a5\3\2\2\2\u02a5\u02a6\3") + buf.write("\2\2\2\u02a6\u02a7\7\66\2\2\u02a7q\3\2\2\2\u02a8\u02ab") + buf.write("\7G\2\2\u02a9\u02ab\5<\37\2\u02aa\u02a8\3\2\2\2\u02aa") + buf.write("\u02a9\3\2\2\2\u02ab\u02ae\3\2\2\2\u02ac\u02ad\7\31\2") + buf.write("\2\u02ad\u02af\5<\37\2\u02ae\u02ac\3\2\2\2\u02ae\u02af") + buf.write("\3\2\2\2\u02afs\3\2\2\2Ow\u0082\u0088\u008f\u0093\u009a") + buf.write("\u00a0\u00a5\u00a8\u00b5\u00c0\u00c4\u00ce\u00d2\u00d9") + buf.write("\u00e4\u00ea\u00ef\u00f2\u00f5\u00fb\u0100\u0103\u0106") + buf.write("\u010e\u0111\u0113\u011d\u0121\u012a\u0135\u013a\u0141") + buf.write("\u0147\u0151\u0156\u015d\u0166\u016c\u0174\u017c\u0183") + buf.write("\u0186\u018e\u0196\u019a\u01a0\u01a4\u01a7\u01ad\u01b1") + buf.write("\u01b6\u01bf\u01cc\u01e4\u01e6\u01f3\u01fb\u0209\u0211") + buf.write("\u0218\u0226\u023a\u0244\u024c\u0254\u025c\u0261\u0269") + buf.write("\u0272\u027b\u0287\u0295\u02a1\u02a4\u02aa\u02ae") return buf.getvalue() @@ -233,143 +360,202 @@ class PFDLParser ( Parser ): sharedContextCache = 
PredictionContextCache() - literalNames = [ "", "", "", "'Struct'", - "'Task'", "'In'", "'Out'", "'Loop'", "'While'", "'To'", - "'Parallel'", "'Condition'", "'Passed'", "'Failed'", - "'OnDone'", "'End'", "'number'", "'string'", "'boolean'", - "", "", "", "'.'", "", + literalNames = [ "", "", "", "'Rule'", "'Module'", + "'Import'", "'Transport'", "'Move'", "'Action'", "'From'", + "'Do'", "'Repeat'", "'OnDone'", "'TransportOrderStep'", + "'MoveOrderStep'", "'ActionOrderStep'", "'Constraints'", + "'Parameters'", "'StartedBy'", "'FinishedBy'", "'Location'", + "'Event'", "'Time'", "'='", "'Struct'", "'Task'", "'In'", + "'Out'", "'Loop'", "'While'", "'To'", "'Parallel'", + "'Condition'", "'Passed'", "'Failed'", "'End'", "'number'", + "'string'", "'boolean'", "", "", + "", "'.'", "", "", "", "", "", "", "", - "", "", "", "'('", "')'", - "'<'", "'<='", "'>'", "'>='", "'=='", "'!='", "'And'", - "'Or'", "'!'", "'*'", "'/'", "'-'", "'+'", "", + "", "'('", "')'", "'<'", "'<='", "'>'", "'>='", + "'=='", "'!='", "'And'", "'Or'", "'!'", "'*'", "'/'", + "'-'", "'+'", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", - "", "", "", "", - "'}'" ] - - symbolicNames = [ "", "INDENT", "DEDENT", "STRUCT", "TASK", - "IN", "OUT", "LOOP", "WHILE", "TO", "PARALLEL", "CONDITION", - "PASSED", "FAILED", "ON_DONE", "END", "NUMBER_P", - "STRING_P", "BOOLEAN_P", "TRUE", "FALSE", "COLON", - "DOT", "COMMA", "JSON_OPEN", "QUOTE", "ARRAY_LEFT", - "ARRAY_RIGHT", "COMMENT", "WHITESPACE", "NL", "LEFT_PARENTHESIS", - "RIGHT_PARENTHESIS", "LESS_THAN", "LESS_THAN_OR_EQUAL", - "GREATER_THAN", "GREATER_THAN_OR_EQUAL", "EQUAL", - "NOT_EQUAL", "BOOLEAN_AND", "BOOLEAN_OR", "BOOLEAN_NOT", - "STAR", "SLASH", "MINUS", "PLUS", "INTEGER", "FLOAT", - "STRING", "STARTS_WITH_LOWER_C_STR", "STARTS_WITH_UPPER_C_STR", + "", "", "'}'" ] + + symbolicNames = [ "", "INDENT", "DEDENT", "RULE", "MODULE", + "IMPORT", "TRANSPORT", "MOVE", "ACTION", "FROM", "DO", + "REPEAT", "ON_DONE", "TRANSPORT_ORDER_STEP", 
"MOVE_ORDER_STEP", + "ACTION_ORDER_STEP", "CONSTRAINTS", "PARAMETERS", + "STARTED_BY", "FINISHED_BY", "LOCATION", "EVENT", + "TIME", "ASSIGNMENT", "STRUCT", "TASK", "IN", "OUT", + "LOOP", "WHILE", "TO", "PARALLEL", "CONDITION", "PASSED", + "FAILED", "END", "NUMBER_P", "STRING_P", "BOOLEAN_P", + "TRUE", "FALSE", "COLON", "DOT", "COMMA", "JSON_OPEN", + "QUOTE", "ARRAY_LEFT", "ARRAY_RIGHT", "COMMENT", "WHITESPACE", + "NL", "LEFT_PARENTHESIS", "RIGHT_PARENTHESIS", "LESS_THAN", + "LESS_THAN_OR_EQUAL", "GREATER_THAN", "GREATER_THAN_OR_EQUAL", + "EQUAL", "NOT_EQUAL", "BOOLEAN_AND", "BOOLEAN_OR", + "BOOLEAN_NOT", "STAR", "SLASH", "MINUS", "PLUS", "INTEGER", + "FLOAT", "STRING", "STARTS_WITH_LOWER_C_STR", "STARTS_WITH_UPPER_C_STR", "JSON_STRING", "JSON_TRUE", "JSON_FALSE", "JSON_COLON", "JSON_QUOTE", "JSON_COMMENT", "JSON_ARRAY_LEFT", "JSON_ARRAY_RIGHT", "JSON_COMMA", "NUMBER", "WS", "JSON_OPEN_2", "JSON_CLOSE" ] RULE_program = 0 - RULE_struct = 1 - RULE_task = 2 - RULE_task_in = 3 - RULE_task_out = 4 - RULE_statement = 5 - RULE_service_call = 6 - RULE_task_call = 7 - RULE_call_input = 8 - RULE_call_output = 9 - RULE_parallel = 10 - RULE_while_loop = 11 - RULE_counting_loop = 12 - RULE_condition = 13 - RULE_condition_passed = 14 - RULE_condition_failed = 15 - RULE_parameter = 16 - RULE_struct_initialization = 17 - RULE_variable_definition = 18 - RULE_variable_type = 19 - RULE_primitive = 20 - RULE_attribute_access = 21 - RULE_array = 22 - RULE_number = 23 - RULE_value = 24 - RULE_expression = 25 - RULE_binOperation = 26 - RULE_unOperation = 27 - RULE_json_object = 28 - RULE_pair = 29 - RULE_json_open_bracket = 30 - RULE_json_value = 31 - RULE_json_array = 32 - - ruleNames = [ "program", "struct", "task", "task_in", "task_out", "statement", - "service_call", "task_call", "call_input", "call_output", - "parallel", "while_loop", "counting_loop", "condition", - "condition_passed", "condition_failed", "parameter", - "struct_initialization", "variable_definition", 
"variable_type", - "primitive", "attribute_access", "array", "number", "value", - "expression", "binOperation", "unOperation", "json_object", - "pair", "json_open_bracket", "json_value", "json_array" ] + RULE_program_statement = 1 + RULE_struct = 2 + RULE_struct_id = 3 + RULE_task = 4 + RULE_instance = 5 + RULE_task_in = 6 + RULE_task_out = 7 + RULE_taskStatement = 8 + RULE_statement = 9 + RULE_service_call = 10 + RULE_task_call = 11 + RULE_call_input = 12 + RULE_call_output = 13 + RULE_parallel = 14 + RULE_while_loop = 15 + RULE_counting_loop = 16 + RULE_condition = 17 + RULE_condition_passed = 18 + RULE_condition_failed = 19 + RULE_parameter = 20 + RULE_struct_initialization = 21 + RULE_variable_definition = 22 + RULE_variable_type = 23 + RULE_primitive = 24 + RULE_attribute_access = 25 + RULE_attribute_assignment = 26 + RULE_array = 27 + RULE_number = 28 + RULE_value = 29 + RULE_expression = 30 + RULE_binOperation = 31 + RULE_unOperation = 32 + RULE_json_object = 33 + RULE_pair = 34 + RULE_json_open_bracket = 35 + RULE_json_value = 36 + RULE_json_array = 37 + RULE_transportStatement = 38 + RULE_tosCollectionStatement = 39 + RULE_moveStatement = 40 + RULE_actionStatement = 41 + RULE_constraintStatement = 42 + RULE_moveOrderStep = 43 + RULE_mosStatement = 44 + RULE_actionOrderStep = 45 + RULE_aosStatement = 46 + RULE_orderStep = 47 + RULE_transportOrderStep = 48 + RULE_tosStatement = 49 + RULE_locationStatement = 50 + RULE_parameterStatement = 51 + RULE_eventStatement = 52 + RULE_onDoneStatement = 53 + RULE_rule_ = 54 + RULE_rule_call = 55 + RULE_rule_parameter = 56 + + ruleNames = [ "program", "program_statement", "struct", "struct_id", + "task", "instance", "task_in", "task_out", "taskStatement", + "statement", "service_call", "task_call", "call_input", + "call_output", "parallel", "while_loop", "counting_loop", + "condition", "condition_passed", "condition_failed", + "parameter", "struct_initialization", "variable_definition", + "variable_type", "primitive", 
"attribute_access", "attribute_assignment", + "array", "number", "value", "expression", "binOperation", + "unOperation", "json_object", "pair", "json_open_bracket", + "json_value", "json_array", "transportStatement", "tosCollectionStatement", + "moveStatement", "actionStatement", "constraintStatement", + "moveOrderStep", "mosStatement", "actionOrderStep", "aosStatement", + "orderStep", "transportOrderStep", "tosStatement", "locationStatement", + "parameterStatement", "eventStatement", "onDoneStatement", + "rule_", "rule_call", "rule_parameter" ] EOF = Token.EOF INDENT=1 DEDENT=2 - STRUCT=3 - TASK=4 - IN=5 - OUT=6 - LOOP=7 - WHILE=8 - TO=9 - PARALLEL=10 - CONDITION=11 - PASSED=12 - FAILED=13 - ON_DONE=14 - END=15 - NUMBER_P=16 - STRING_P=17 - BOOLEAN_P=18 - TRUE=19 - FALSE=20 - COLON=21 - DOT=22 - COMMA=23 - JSON_OPEN=24 - QUOTE=25 - ARRAY_LEFT=26 - ARRAY_RIGHT=27 - COMMENT=28 - WHITESPACE=29 - NL=30 - LEFT_PARENTHESIS=31 - RIGHT_PARENTHESIS=32 - LESS_THAN=33 - LESS_THAN_OR_EQUAL=34 - GREATER_THAN=35 - GREATER_THAN_OR_EQUAL=36 - EQUAL=37 - NOT_EQUAL=38 - BOOLEAN_AND=39 - BOOLEAN_OR=40 - BOOLEAN_NOT=41 - STAR=42 - SLASH=43 - MINUS=44 - PLUS=45 - INTEGER=46 - FLOAT=47 - STRING=48 - STARTS_WITH_LOWER_C_STR=49 - STARTS_WITH_UPPER_C_STR=50 - JSON_STRING=51 - JSON_TRUE=52 - JSON_FALSE=53 - JSON_COLON=54 - JSON_QUOTE=55 - JSON_COMMENT=56 - JSON_ARRAY_LEFT=57 - JSON_ARRAY_RIGHT=58 - JSON_COMMA=59 - NUMBER=60 - WS=61 - JSON_OPEN_2=62 - JSON_CLOSE=63 + RULE=3 + MODULE=4 + IMPORT=5 + TRANSPORT=6 + MOVE=7 + ACTION=8 + FROM=9 + DO=10 + REPEAT=11 + ON_DONE=12 + TRANSPORT_ORDER_STEP=13 + MOVE_ORDER_STEP=14 + ACTION_ORDER_STEP=15 + CONSTRAINTS=16 + PARAMETERS=17 + STARTED_BY=18 + FINISHED_BY=19 + LOCATION=20 + EVENT=21 + TIME=22 + ASSIGNMENT=23 + STRUCT=24 + TASK=25 + IN=26 + OUT=27 + LOOP=28 + WHILE=29 + TO=30 + PARALLEL=31 + CONDITION=32 + PASSED=33 + FAILED=34 + END=35 + NUMBER_P=36 + STRING_P=37 + BOOLEAN_P=38 + TRUE=39 + FALSE=40 + COLON=41 + DOT=42 + COMMA=43 + JSON_OPEN=44 + 
QUOTE=45 + ARRAY_LEFT=46 + ARRAY_RIGHT=47 + COMMENT=48 + WHITESPACE=49 + NL=50 + LEFT_PARENTHESIS=51 + RIGHT_PARENTHESIS=52 + LESS_THAN=53 + LESS_THAN_OR_EQUAL=54 + GREATER_THAN=55 + GREATER_THAN_OR_EQUAL=56 + EQUAL=57 + NOT_EQUAL=58 + BOOLEAN_AND=59 + BOOLEAN_OR=60 + BOOLEAN_NOT=61 + STAR=62 + SLASH=63 + MINUS=64 + PLUS=65 + INTEGER=66 + FLOAT=67 + STRING=68 + STARTS_WITH_LOWER_C_STR=69 + STARTS_WITH_UPPER_C_STR=70 + JSON_STRING=71 + JSON_TRUE=72 + JSON_FALSE=73 + JSON_COLON=74 + JSON_QUOTE=75 + JSON_COMMENT=76 + JSON_ARRAY_LEFT=77 + JSON_ARRAY_RIGHT=78 + JSON_COMMA=79 + NUMBER=80 + WS=81 + JSON_OPEN_2=82 + JSON_CLOSE=83 def __init__(self, input:TokenStream, output:TextIO = sys.stdout): super().__init__(input, output) @@ -390,24 +576,11 @@ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): def EOF(self): return self.getToken(PFDLParser.EOF, 0) - def NL(self, i:int=None): - if i is None: - return self.getTokens(PFDLParser.NL) - else: - return self.getToken(PFDLParser.NL, i) - - def struct(self, i:int=None): - if i is None: - return self.getTypedRuleContexts(PFDLParser.StructContext) - else: - return self.getTypedRuleContext(PFDLParser.StructContext,i) - - - def task(self, i:int=None): + def program_statement(self, i:int=None): if i is None: - return self.getTypedRuleContexts(PFDLParser.TaskContext) + return self.getTypedRuleContexts(PFDLParser.Program_statementContext) else: - return self.getTypedRuleContext(PFDLParser.TaskContext,i) + return self.getTypedRuleContext(PFDLParser.Program_statementContext,i) def getRuleIndex(self): @@ -437,33 +610,17 @@ def program(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 71 + self.state = 117 self._errHandler.sync(self) _la = self._input.LA(1) - while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PFDLParser.STRUCT) | (1 << PFDLParser.TASK) | (1 << PFDLParser.NL))) != 0): - self.state = 69 - self._errHandler.sync(self) - token = self._input.LA(1) - if token in 
[PFDLParser.NL]: - self.state = 66 - self.match(PFDLParser.NL) - pass - elif token in [PFDLParser.STRUCT]: - self.state = 67 - self.struct() - pass - elif token in [PFDLParser.TASK]: - self.state = 68 - self.task() - pass - else: - raise NoViableAltException(self) - - self.state = 73 + while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PFDLParser.RULE) | (1 << PFDLParser.TRANSPORT_ORDER_STEP) | (1 << PFDLParser.MOVE_ORDER_STEP) | (1 << PFDLParser.ACTION_ORDER_STEP) | (1 << PFDLParser.LOCATION) | (1 << PFDLParser.EVENT) | (1 << PFDLParser.TIME) | (1 << PFDLParser.STRUCT) | (1 << PFDLParser.TASK) | (1 << PFDLParser.NL))) != 0) or _la==PFDLParser.STARTS_WITH_UPPER_C_STR: + self.state = 114 + self.program_statement() + self.state = 119 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 74 + self.state = 120 self.match(PFDLParser.EOF) except RecognitionException as re: localctx.exception = re @@ -474,6 +631,106 @@ def program(self): return localctx + class Program_statementContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def NL(self): + return self.getToken(PFDLParser.NL, 0) + + def struct(self): + return self.getTypedRuleContext(PFDLParser.StructContext,0) + + + def task(self): + return self.getTypedRuleContext(PFDLParser.TaskContext,0) + + + def instance(self): + return self.getTypedRuleContext(PFDLParser.InstanceContext,0) + + + def rule_(self): + return self.getTypedRuleContext(PFDLParser.Rule_Context,0) + + + def orderStep(self): + return self.getTypedRuleContext(PFDLParser.OrderStepContext,0) + + + def getRuleIndex(self): + return PFDLParser.RULE_program_statement + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterProgram_statement" ): + listener.enterProgram_statement(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, 
"exitProgram_statement" ): + listener.exitProgram_statement(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitProgram_statement" ): + return visitor.visitProgram_statement(self) + else: + return visitor.visitChildren(self) + + + + + def program_statement(self): + + localctx = PFDLParser.Program_statementContext(self, self._ctx, self.state) + self.enterRule(localctx, 2, self.RULE_program_statement) + try: + self.state = 128 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [PFDLParser.NL]: + self.enterOuterAlt(localctx, 1) + self.state = 122 + self.match(PFDLParser.NL) + pass + elif token in [PFDLParser.STRUCT]: + self.enterOuterAlt(localctx, 2) + self.state = 123 + self.struct() + pass + elif token in [PFDLParser.TASK]: + self.enterOuterAlt(localctx, 3) + self.state = 124 + self.task() + pass + elif token in [PFDLParser.LOCATION, PFDLParser.EVENT, PFDLParser.TIME, PFDLParser.STARTS_WITH_UPPER_C_STR]: + self.enterOuterAlt(localctx, 4) + self.state = 125 + self.instance() + pass + elif token in [PFDLParser.RULE]: + self.enterOuterAlt(localctx, 5) + self.state = 126 + self.rule_() + pass + elif token in [PFDLParser.TRANSPORT_ORDER_STEP, PFDLParser.MOVE_ORDER_STEP, PFDLParser.ACTION_ORDER_STEP]: + self.enterOuterAlt(localctx, 6) + self.state = 127 + self.orderStep() + pass + else: + raise NoViableAltException(self) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + class StructContext(ParserRuleContext): __slots__ = 'parser' @@ -496,6 +753,13 @@ def DEDENT(self): def END(self): return self.getToken(PFDLParser.END, 0) + def COLON(self): + return self.getToken(PFDLParser.COLON, 0) + + def struct_id(self): + return self.getTypedRuleContext(PFDLParser.Struct_idContext,0) + + def variable_definition(self, i:int=None): if i is None: return 
self.getTypedRuleContexts(PFDLParser.Variable_definitionContext) @@ -532,43 +796,53 @@ def accept(self, visitor:ParseTreeVisitor): def struct(self): localctx = PFDLParser.StructContext(self, self._ctx, self.state) - self.enterRule(localctx, 2, self.RULE_struct) + self.enterRule(localctx, 4, self.RULE_struct) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 76 + self.state = 130 self.match(PFDLParser.STRUCT) - self.state = 77 + self.state = 131 self.match(PFDLParser.STARTS_WITH_UPPER_C_STR) - self.state = 78 + self.state = 134 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==PFDLParser.COLON: + self.state = 132 + self.match(PFDLParser.COLON) + self.state = 133 + self.struct_id() + + + self.state = 136 self.match(PFDLParser.INDENT) - self.state = 85 + self.state = 143 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 79 + self.state = 137 self.variable_definition() - self.state = 81 + self.state = 139 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 80 + self.state = 138 self.match(PFDLParser.NL) - self.state = 83 + self.state = 141 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==PFDLParser.NL): break - self.state = 87 + self.state = 145 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==PFDLParser.STARTS_WITH_LOWER_C_STR): break - self.state = 89 + self.state = 147 self.match(PFDLParser.DEDENT) - self.state = 90 + self.state = 148 self.match(PFDLParser.END) except RecognitionException as re: localctx.exception = re @@ -579,6 +853,81 @@ def struct(self): return localctx + class Struct_idContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def STARTS_WITH_UPPER_C_STR(self): + return self.getToken(PFDLParser.STARTS_WITH_UPPER_C_STR, 0) + + def LOCATION(self): + return 
self.getToken(PFDLParser.LOCATION, 0) + + def EVENT(self): + return self.getToken(PFDLParser.EVENT, 0) + + def TIME(self): + return self.getToken(PFDLParser.TIME, 0) + + def getRuleIndex(self): + return PFDLParser.RULE_struct_id + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterStruct_id" ): + listener.enterStruct_id(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitStruct_id" ): + listener.exitStruct_id(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitStruct_id" ): + return visitor.visitStruct_id(self) + else: + return visitor.visitChildren(self) + + + + + def struct_id(self): + + localctx = PFDLParser.Struct_idContext(self, self._ctx, self.state) + self.enterRule(localctx, 6, self.RULE_struct_id) + self._la = 0 # Token type + try: + self.state = 152 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,5,self._ctx) + if la_ == 1: + self.enterOuterAlt(localctx, 1) + self.state = 150 + self.match(PFDLParser.STARTS_WITH_UPPER_C_STR) + pass + + elif la_ == 2: + self.enterOuterAlt(localctx, 2) + self.state = 151 + _la = self._input.LA(1) + if not(((((_la - 20)) & ~0x3f) == 0 and ((1 << (_la - 20)) & ((1 << (PFDLParser.LOCATION - 20)) | (1 << (PFDLParser.EVENT - 20)) | (1 << (PFDLParser.TIME - 20)) | (1 << (PFDLParser.STARTS_WITH_UPPER_C_STR - 20)))) != 0)): + self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + pass + + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + class TaskContext(ParserRuleContext): __slots__ = 'parser' @@ -605,11 +954,11 @@ def task_in(self): return self.getTypedRuleContext(PFDLParser.Task_inContext,0) - def statement(self, i:int=None): + def taskStatement(self, i:int=None): if i is None: - return 
self.getTypedRuleContexts(PFDLParser.StatementContext) + return self.getTypedRuleContexts(PFDLParser.TaskStatementContext) else: - return self.getTypedRuleContext(PFDLParser.StatementContext,i) + return self.getTypedRuleContext(PFDLParser.TaskStatementContext,i) def task_out(self): @@ -639,47 +988,143 @@ def accept(self, visitor:ParseTreeVisitor): def task(self): localctx = PFDLParser.TaskContext(self, self._ctx, self.state) - self.enterRule(localctx, 4, self.RULE_task) + self.enterRule(localctx, 8, self.RULE_task) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 92 + self.state = 154 self.match(PFDLParser.TASK) - self.state = 93 + self.state = 155 self.match(PFDLParser.STARTS_WITH_LOWER_C_STR) - self.state = 94 + self.state = 156 self.match(PFDLParser.INDENT) - self.state = 96 + self.state = 158 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PFDLParser.IN: - self.state = 95 + self.state = 157 self.task_in() - self.state = 99 + self.state = 161 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 98 - self.statement() - self.state = 101 + self.state = 160 + self.taskStatement() + self.state = 163 self._errHandler.sync(self) _la = self._input.LA(1) - if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PFDLParser.LOOP) | (1 << PFDLParser.PARALLEL) | (1 << PFDLParser.CONDITION) | (1 << PFDLParser.STARTS_WITH_LOWER_C_STR) | (1 << PFDLParser.STARTS_WITH_UPPER_C_STR))) != 0)): + if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PFDLParser.TRANSPORT) | (1 << PFDLParser.MOVE) | (1 << PFDLParser.ACTION) | (1 << PFDLParser.CONSTRAINTS) | (1 << PFDLParser.STARTED_BY) | (1 << PFDLParser.FINISHED_BY) | (1 << PFDLParser.LOOP) | (1 << PFDLParser.PARALLEL) | (1 << PFDLParser.CONDITION))) != 0) or _la==PFDLParser.STARTS_WITH_LOWER_C_STR or _la==PFDLParser.STARTS_WITH_UPPER_C_STR): break - self.state = 104 + self.state = 166 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PFDLParser.OUT: - 
self.state = 103 + self.state = 165 self.task_out() - self.state = 106 + self.state = 168 + self.match(PFDLParser.DEDENT) + self.state = 169 + self.match(PFDLParser.END) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class InstanceContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def struct_id(self): + return self.getTypedRuleContext(PFDLParser.Struct_idContext,0) + + + def STARTS_WITH_LOWER_C_STR(self): + return self.getToken(PFDLParser.STARTS_WITH_LOWER_C_STR, 0) + + def INDENT(self): + return self.getToken(PFDLParser.INDENT, 0) + + def DEDENT(self): + return self.getToken(PFDLParser.DEDENT, 0) + + def END(self): + return self.getToken(PFDLParser.END, 0) + + def attribute_assignment(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(PFDLParser.Attribute_assignmentContext) + else: + return self.getTypedRuleContext(PFDLParser.Attribute_assignmentContext,i) + + + def NL(self, i:int=None): + if i is None: + return self.getTokens(PFDLParser.NL) + else: + return self.getToken(PFDLParser.NL, i) + + def getRuleIndex(self): + return PFDLParser.RULE_instance + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterInstance" ): + listener.enterInstance(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitInstance" ): + listener.exitInstance(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitInstance" ): + return visitor.visitInstance(self) + else: + return visitor.visitChildren(self) + + + + + def instance(self): + + localctx = PFDLParser.InstanceContext(self, self._ctx, self.state) + self.enterRule(localctx, 10, self.RULE_instance) + self._la = 0 # Token type + try: + 
self.enterOuterAlt(localctx, 1) + self.state = 171 + self.struct_id() + self.state = 172 + self.match(PFDLParser.STARTS_WITH_LOWER_C_STR) + self.state = 173 + self.match(PFDLParser.INDENT) + self.state = 177 + self._errHandler.sync(self) + _la = self._input.LA(1) + while True: + self.state = 174 + self.attribute_assignment() + self.state = 175 + self.match(PFDLParser.NL) + self.state = 179 + self._errHandler.sync(self) + _la = self._input.LA(1) + if not (_la==PFDLParser.STARTS_WITH_LOWER_C_STR): + break + + self.state = 181 self.match(PFDLParser.DEDENT) - self.state = 107 + self.state = 182 self.match(PFDLParser.END) except RecognitionException as re: localctx.exception = re @@ -742,39 +1187,39 @@ def accept(self, visitor:ParseTreeVisitor): def task_in(self): localctx = PFDLParser.Task_inContext(self, self._ctx, self.state) - self.enterRule(localctx, 6, self.RULE_task_in) + self.enterRule(localctx, 12, self.RULE_task_in) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 109 + self.state = 184 self.match(PFDLParser.IN) - self.state = 110 + self.state = 185 self.match(PFDLParser.INDENT) - self.state = 117 + self.state = 192 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 111 + self.state = 186 self.variable_definition() - self.state = 113 + self.state = 188 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 112 + self.state = 187 self.match(PFDLParser.NL) - self.state = 115 + self.state = 190 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==PFDLParser.NL): break - self.state = 119 + self.state = 194 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==PFDLParser.STARTS_WITH_LOWER_C_STR): break - self.state = 121 + self.state = 196 self.match(PFDLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -836,39 +1281,39 @@ def accept(self, visitor:ParseTreeVisitor): def task_out(self): localctx = PFDLParser.Task_outContext(self, 
self._ctx, self.state) - self.enterRule(localctx, 8, self.RULE_task_out) + self.enterRule(localctx, 14, self.RULE_task_out) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 123 + self.state = 198 self.match(PFDLParser.OUT) - self.state = 124 + self.state = 199 self.match(PFDLParser.INDENT) - self.state = 131 + self.state = 206 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 125 + self.state = 200 self.match(PFDLParser.STARTS_WITH_LOWER_C_STR) - self.state = 127 + self.state = 202 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 126 + self.state = 201 self.match(PFDLParser.NL) - self.state = 129 + self.state = 204 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==PFDLParser.NL): break - self.state = 133 + self.state = 208 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==PFDLParser.STARTS_WITH_LOWER_C_STR): break - self.state = 135 + self.state = 210 self.match(PFDLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -879,101 +1324,205 @@ def task_out(self): return localctx - class StatementContext(ParserRuleContext): + class TaskStatementContext(ParserRuleContext): __slots__ = 'parser' def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): super().__init__(parent, invokingState) self.parser = parser - def service_call(self): - return self.getTypedRuleContext(PFDLParser.Service_callContext,0) - - - def task_call(self): - return self.getTypedRuleContext(PFDLParser.Task_callContext,0) - - - def parallel(self): - return self.getTypedRuleContext(PFDLParser.ParallelContext,0) - - - def while_loop(self): - return self.getTypedRuleContext(PFDLParser.While_loopContext,0) + def statement(self): + return self.getTypedRuleContext(PFDLParser.StatementContext,0) - def counting_loop(self): - return self.getTypedRuleContext(PFDLParser.Counting_loopContext,0) + def eventStatement(self): + return 
self.getTypedRuleContext(PFDLParser.EventStatementContext,0) - def condition(self): - return self.getTypedRuleContext(PFDLParser.ConditionContext,0) + def constraintStatement(self): + return self.getTypedRuleContext(PFDLParser.ConstraintStatementContext,0) def getRuleIndex(self): - return PFDLParser.RULE_statement + return PFDLParser.RULE_taskStatement def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterStatement" ): - listener.enterStatement(self) + if hasattr( listener, "enterTaskStatement" ): + listener.enterTaskStatement(self) def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitStatement" ): - listener.exitStatement(self) + if hasattr( listener, "exitTaskStatement" ): + listener.exitTaskStatement(self) def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitStatement" ): - return visitor.visitStatement(self) + if hasattr( visitor, "visitTaskStatement" ): + return visitor.visitTaskStatement(self) else: return visitor.visitChildren(self) - def statement(self): + def taskStatement(self): - localctx = PFDLParser.StatementContext(self, self._ctx, self.state) - self.enterRule(localctx, 10, self.RULE_statement) + localctx = PFDLParser.TaskStatementContext(self, self._ctx, self.state) + self.enterRule(localctx, 16, self.RULE_taskStatement) try: - self.state = 143 + self.state = 215 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,11,self._ctx) - if la_ == 1: + token = self._input.LA(1) + if token in [PFDLParser.TRANSPORT, PFDLParser.MOVE, PFDLParser.ACTION, PFDLParser.LOOP, PFDLParser.PARALLEL, PFDLParser.CONDITION, PFDLParser.STARTS_WITH_LOWER_C_STR, PFDLParser.STARTS_WITH_UPPER_C_STR]: self.enterOuterAlt(localctx, 1) - self.state = 137 - self.service_call() + self.state = 212 + self.statement() pass - + elif token in [PFDLParser.STARTED_BY, PFDLParser.FINISHED_BY]: + self.enterOuterAlt(localctx, 2) + self.state = 213 + self.eventStatement() + pass + elif token in 
[PFDLParser.CONSTRAINTS]: + self.enterOuterAlt(localctx, 3) + self.state = 214 + self.constraintStatement() + pass + else: + raise NoViableAltException(self) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class StatementContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def service_call(self): + return self.getTypedRuleContext(PFDLParser.Service_callContext,0) + + + def task_call(self): + return self.getTypedRuleContext(PFDLParser.Task_callContext,0) + + + def parallel(self): + return self.getTypedRuleContext(PFDLParser.ParallelContext,0) + + + def while_loop(self): + return self.getTypedRuleContext(PFDLParser.While_loopContext,0) + + + def counting_loop(self): + return self.getTypedRuleContext(PFDLParser.Counting_loopContext,0) + + + def condition(self): + return self.getTypedRuleContext(PFDLParser.ConditionContext,0) + + + def transportStatement(self): + return self.getTypedRuleContext(PFDLParser.TransportStatementContext,0) + + + def moveStatement(self): + return self.getTypedRuleContext(PFDLParser.MoveStatementContext,0) + + + def actionStatement(self): + return self.getTypedRuleContext(PFDLParser.ActionStatementContext,0) + + + def getRuleIndex(self): + return PFDLParser.RULE_statement + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterStatement" ): + listener.enterStatement(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitStatement" ): + listener.exitStatement(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitStatement" ): + return visitor.visitStatement(self) + else: + return visitor.visitChildren(self) + + + + + def statement(self): + + localctx = 
PFDLParser.StatementContext(self, self._ctx, self.state) + self.enterRule(localctx, 18, self.RULE_statement) + try: + self.state = 226 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,15,self._ctx) + if la_ == 1: + self.enterOuterAlt(localctx, 1) + self.state = 217 + self.service_call() + pass + elif la_ == 2: self.enterOuterAlt(localctx, 2) - self.state = 138 + self.state = 218 self.task_call() pass elif la_ == 3: self.enterOuterAlt(localctx, 3) - self.state = 139 + self.state = 219 self.parallel() pass elif la_ == 4: self.enterOuterAlt(localctx, 4) - self.state = 140 + self.state = 220 self.while_loop() pass elif la_ == 5: self.enterOuterAlt(localctx, 5) - self.state = 141 + self.state = 221 self.counting_loop() pass elif la_ == 6: self.enterOuterAlt(localctx, 6) - self.state = 142 + self.state = 222 self.condition() pass + elif la_ == 7: + self.enterOuterAlt(localctx, 7) + self.state = 223 + self.transportStatement() + pass + + elif la_ == 8: + self.enterOuterAlt(localctx, 8) + self.state = 224 + self.moveStatement() + pass + + elif la_ == 9: + self.enterOuterAlt(localctx, 9) + self.state = 225 + self.actionStatement() + pass + except RecognitionException as re: localctx.exception = re @@ -1037,23 +1586,23 @@ def accept(self, visitor:ParseTreeVisitor): def service_call(self): localctx = PFDLParser.Service_callContext(self, self._ctx, self.state) - self.enterRule(localctx, 12, self.RULE_service_call) + self.enterRule(localctx, 20, self.RULE_service_call) self._la = 0 # Token type try: - self.state = 160 + self.state = 243 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,15,self._ctx) + la_ = self._interp.adaptivePredict(self._input,19,self._ctx) if la_ == 1: self.enterOuterAlt(localctx, 1) - self.state = 145 + self.state = 228 self.match(PFDLParser.STARTS_WITH_UPPER_C_STR) - self.state = 147 + self.state = 230 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 146 + self.state = 
229 self.match(PFDLParser.NL) - self.state = 149 + self.state = 232 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==PFDLParser.NL): @@ -1063,27 +1612,27 @@ def service_call(self): elif la_ == 2: self.enterOuterAlt(localctx, 2) - self.state = 151 + self.state = 234 self.match(PFDLParser.STARTS_WITH_UPPER_C_STR) - self.state = 152 + self.state = 235 self.match(PFDLParser.INDENT) - self.state = 154 + self.state = 237 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PFDLParser.IN: - self.state = 153 + self.state = 236 self.call_input() - self.state = 157 + self.state = 240 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PFDLParser.OUT: - self.state = 156 + self.state = 239 self.call_output() - self.state = 159 + self.state = 242 self.match(PFDLParser.DEDENT) pass @@ -1150,23 +1699,23 @@ def accept(self, visitor:ParseTreeVisitor): def task_call(self): localctx = PFDLParser.Task_callContext(self, self._ctx, self.state) - self.enterRule(localctx, 14, self.RULE_task_call) + self.enterRule(localctx, 22, self.RULE_task_call) self._la = 0 # Token type try: - self.state = 177 + self.state = 260 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,19,self._ctx) + la_ = self._interp.adaptivePredict(self._input,23,self._ctx) if la_ == 1: self.enterOuterAlt(localctx, 1) - self.state = 162 + self.state = 245 self.match(PFDLParser.STARTS_WITH_LOWER_C_STR) - self.state = 164 + self.state = 247 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 163 + self.state = 246 self.match(PFDLParser.NL) - self.state = 166 + self.state = 249 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==PFDLParser.NL): @@ -1176,27 +1725,27 @@ def task_call(self): elif la_ == 2: self.enterOuterAlt(localctx, 2) - self.state = 168 + self.state = 251 self.match(PFDLParser.STARTS_WITH_LOWER_C_STR) - self.state = 169 + self.state = 252 self.match(PFDLParser.INDENT) - self.state = 171 + self.state = 254 
self._errHandler.sync(self) _la = self._input.LA(1) if _la==PFDLParser.IN: - self.state = 170 + self.state = 253 self.call_input() - self.state = 174 + self.state = 257 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PFDLParser.OUT: - self.state = 173 + self.state = 256 self.call_output() - self.state = 176 + self.state = 259 self.match(PFDLParser.DEDENT) pass @@ -1269,31 +1818,31 @@ def accept(self, visitor:ParseTreeVisitor): def call_input(self): localctx = PFDLParser.Call_inputContext(self, self._ctx, self.state) - self.enterRule(localctx, 16, self.RULE_call_input) + self.enterRule(localctx, 24, self.RULE_call_input) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 179 + self.state = 262 self.match(PFDLParser.IN) - self.state = 180 + self.state = 263 self.match(PFDLParser.INDENT) - self.state = 188 + self.state = 271 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 188 + self.state = 271 self._errHandler.sync(self) token = self._input.LA(1) if token in [PFDLParser.STARTS_WITH_LOWER_C_STR]: - self.state = 181 + self.state = 264 self.parameter() - self.state = 183 + self.state = 266 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 182 + self.state = 265 self.match(PFDLParser.NL) - self.state = 185 + self.state = 268 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==PFDLParser.NL): @@ -1301,19 +1850,19 @@ def call_input(self): pass elif token in [PFDLParser.STARTS_WITH_UPPER_C_STR]: - self.state = 187 + self.state = 270 self.struct_initialization() pass else: raise NoViableAltException(self) - self.state = 190 + self.state = 273 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==PFDLParser.STARTS_WITH_LOWER_C_STR or _la==PFDLParser.STARTS_WITH_UPPER_C_STR): break - self.state = 192 + self.state = 275 self.match(PFDLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -1376,39 +1925,39 @@ def accept(self, 
visitor:ParseTreeVisitor): def call_output(self): localctx = PFDLParser.Call_outputContext(self, self._ctx, self.state) - self.enterRule(localctx, 18, self.RULE_call_output) + self.enterRule(localctx, 26, self.RULE_call_output) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 194 + self.state = 277 self.match(PFDLParser.OUT) - self.state = 195 + self.state = 278 self.match(PFDLParser.INDENT) - self.state = 202 + self.state = 285 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 196 + self.state = 279 self.variable_definition() - self.state = 198 + self.state = 281 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 197 + self.state = 280 self.match(PFDLParser.NL) - self.state = 200 + self.state = 283 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==PFDLParser.NL): break - self.state = 204 + self.state = 287 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==PFDLParser.STARTS_WITH_LOWER_C_STR): break - self.state = 206 + self.state = 289 self.match(PFDLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -1465,27 +2014,27 @@ def accept(self, visitor:ParseTreeVisitor): def parallel(self): localctx = PFDLParser.ParallelContext(self, self._ctx, self.state) - self.enterRule(localctx, 20, self.RULE_parallel) + self.enterRule(localctx, 28, self.RULE_parallel) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 208 + self.state = 291 self.match(PFDLParser.PARALLEL) - self.state = 209 + self.state = 292 self.match(PFDLParser.INDENT) - self.state = 211 + self.state = 294 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 210 + self.state = 293 self.task_call() - self.state = 213 + self.state = 296 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==PFDLParser.STARTS_WITH_LOWER_C_STR): break - self.state = 215 + self.state = 298 self.match(PFDLParser.DEDENT) except 
RecognitionException as re: localctx.exception = re @@ -1549,31 +2098,31 @@ def accept(self, visitor:ParseTreeVisitor): def while_loop(self): localctx = PFDLParser.While_loopContext(self, self._ctx, self.state) - self.enterRule(localctx, 22, self.RULE_while_loop) + self.enterRule(localctx, 30, self.RULE_while_loop) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 217 + self.state = 300 self.match(PFDLParser.LOOP) - self.state = 218 + self.state = 301 self.match(PFDLParser.WHILE) - self.state = 219 + self.state = 302 self.expression(0) - self.state = 220 + self.state = 303 self.match(PFDLParser.INDENT) - self.state = 222 + self.state = 305 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 221 + self.state = 304 self.statement() - self.state = 224 + self.state = 307 self._errHandler.sync(self) _la = self._input.LA(1) - if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PFDLParser.LOOP) | (1 << PFDLParser.PARALLEL) | (1 << PFDLParser.CONDITION) | (1 << PFDLParser.STARTS_WITH_LOWER_C_STR) | (1 << PFDLParser.STARTS_WITH_UPPER_C_STR))) != 0)): + if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PFDLParser.TRANSPORT) | (1 << PFDLParser.MOVE) | (1 << PFDLParser.ACTION) | (1 << PFDLParser.LOOP) | (1 << PFDLParser.PARALLEL) | (1 << PFDLParser.CONDITION))) != 0) or _la==PFDLParser.STARTS_WITH_LOWER_C_STR or _la==PFDLParser.STARTS_WITH_UPPER_C_STR): break - self.state = 226 + self.state = 309 self.match(PFDLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -1646,53 +2195,53 @@ def accept(self, visitor:ParseTreeVisitor): def counting_loop(self): localctx = PFDLParser.Counting_loopContext(self, self._ctx, self.state) - self.enterRule(localctx, 24, self.RULE_counting_loop) + self.enterRule(localctx, 32, self.RULE_counting_loop) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 229 + self.state = 312 self._errHandler.sync(self) _la = self._input.LA(1) if 
_la==PFDLParser.PARALLEL: - self.state = 228 + self.state = 311 self.match(PFDLParser.PARALLEL) - self.state = 231 + self.state = 314 self.match(PFDLParser.LOOP) - self.state = 232 + self.state = 315 self.match(PFDLParser.STARTS_WITH_LOWER_C_STR) - self.state = 233 + self.state = 316 self.match(PFDLParser.TO) - self.state = 236 + self.state = 319 self._errHandler.sync(self) token = self._input.LA(1) if token in [PFDLParser.STARTS_WITH_LOWER_C_STR]: - self.state = 234 + self.state = 317 self.attribute_access() pass elif token in [PFDLParser.INTEGER]: - self.state = 235 + self.state = 318 self.match(PFDLParser.INTEGER) pass else: raise NoViableAltException(self) - self.state = 238 + self.state = 321 self.match(PFDLParser.INDENT) - self.state = 240 + self.state = 323 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 239 + self.state = 322 self.statement() - self.state = 242 + self.state = 325 self._errHandler.sync(self) _la = self._input.LA(1) - if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PFDLParser.LOOP) | (1 << PFDLParser.PARALLEL) | (1 << PFDLParser.CONDITION) | (1 << PFDLParser.STARTS_WITH_LOWER_C_STR) | (1 << PFDLParser.STARTS_WITH_UPPER_C_STR))) != 0)): + if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PFDLParser.TRANSPORT) | (1 << PFDLParser.MOVE) | (1 << PFDLParser.ACTION) | (1 << PFDLParser.LOOP) | (1 << PFDLParser.PARALLEL) | (1 << PFDLParser.CONDITION))) != 0) or _la==PFDLParser.STARTS_WITH_LOWER_C_STR or _la==PFDLParser.STARTS_WITH_UPPER_C_STR): break - self.state = 244 + self.state = 327 self.match(PFDLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -1760,37 +2309,37 @@ def accept(self, visitor:ParseTreeVisitor): def condition(self): localctx = PFDLParser.ConditionContext(self, self._ctx, self.state) - self.enterRule(localctx, 26, self.RULE_condition) + self.enterRule(localctx, 34, self.RULE_condition) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 
246 + self.state = 329 self.match(PFDLParser.CONDITION) - self.state = 247 + self.state = 330 self.match(PFDLParser.INDENT) - self.state = 248 + self.state = 331 self.expression(0) - self.state = 250 + self.state = 333 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 249 + self.state = 332 self.match(PFDLParser.NL) - self.state = 252 + self.state = 335 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==PFDLParser.NL): break - self.state = 254 + self.state = 337 self.match(PFDLParser.DEDENT) - self.state = 255 + self.state = 338 self.condition_passed() - self.state = 257 + self.state = 340 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PFDLParser.FAILED: - self.state = 256 + self.state = 339 self.condition_failed() @@ -1849,27 +2398,27 @@ def accept(self, visitor:ParseTreeVisitor): def condition_passed(self): localctx = PFDLParser.Condition_passedContext(self, self._ctx, self.state) - self.enterRule(localctx, 28, self.RULE_condition_passed) + self.enterRule(localctx, 36, self.RULE_condition_passed) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 259 + self.state = 342 self.match(PFDLParser.PASSED) - self.state = 260 + self.state = 343 self.match(PFDLParser.INDENT) - self.state = 262 + self.state = 345 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 261 + self.state = 344 self.statement() - self.state = 264 + self.state = 347 self._errHandler.sync(self) _la = self._input.LA(1) - if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PFDLParser.LOOP) | (1 << PFDLParser.PARALLEL) | (1 << PFDLParser.CONDITION) | (1 << PFDLParser.STARTS_WITH_LOWER_C_STR) | (1 << PFDLParser.STARTS_WITH_UPPER_C_STR))) != 0)): + if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PFDLParser.TRANSPORT) | (1 << PFDLParser.MOVE) | (1 << PFDLParser.ACTION) | (1 << PFDLParser.LOOP) | (1 << PFDLParser.PARALLEL) | (1 << PFDLParser.CONDITION))) != 0) or 
_la==PFDLParser.STARTS_WITH_LOWER_C_STR or _la==PFDLParser.STARTS_WITH_UPPER_C_STR): break - self.state = 266 + self.state = 349 self.match(PFDLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -1926,27 +2475,27 @@ def accept(self, visitor:ParseTreeVisitor): def condition_failed(self): localctx = PFDLParser.Condition_failedContext(self, self._ctx, self.state) - self.enterRule(localctx, 30, self.RULE_condition_failed) + self.enterRule(localctx, 38, self.RULE_condition_failed) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 268 + self.state = 351 self.match(PFDLParser.FAILED) - self.state = 269 + self.state = 352 self.match(PFDLParser.INDENT) - self.state = 271 + self.state = 354 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 270 + self.state = 353 self.statement() - self.state = 273 + self.state = 356 self._errHandler.sync(self) _la = self._input.LA(1) - if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PFDLParser.LOOP) | (1 << PFDLParser.PARALLEL) | (1 << PFDLParser.CONDITION) | (1 << PFDLParser.STARTS_WITH_LOWER_C_STR) | (1 << PFDLParser.STARTS_WITH_UPPER_C_STR))) != 0)): + if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PFDLParser.TRANSPORT) | (1 << PFDLParser.MOVE) | (1 << PFDLParser.ACTION) | (1 << PFDLParser.LOOP) | (1 << PFDLParser.PARALLEL) | (1 << PFDLParser.CONDITION))) != 0) or _la==PFDLParser.STARTS_WITH_LOWER_C_STR or _la==PFDLParser.STARTS_WITH_UPPER_C_STR): break - self.state = 275 + self.state = 358 self.match(PFDLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -1994,20 +2543,20 @@ def accept(self, visitor:ParseTreeVisitor): def parameter(self): localctx = PFDLParser.ParameterContext(self, self._ctx, self.state) - self.enterRule(localctx, 32, self.RULE_parameter) + self.enterRule(localctx, 40, self.RULE_parameter) try: - self.state = 279 + self.state = 362 self._errHandler.sync(self) - la_ = 
self._interp.adaptivePredict(self._input,34,self._ctx) + la_ = self._interp.adaptivePredict(self._input,38,self._ctx) if la_ == 1: self.enterOuterAlt(localctx, 1) - self.state = 277 + self.state = 360 self.match(PFDLParser.STARTS_WITH_LOWER_C_STR) pass elif la_ == 2: self.enterOuterAlt(localctx, 2) - self.state = 278 + self.state = 361 self.attribute_access() pass @@ -2070,59 +2619,59 @@ def accept(self, visitor:ParseTreeVisitor): def struct_initialization(self): localctx = PFDLParser.Struct_initializationContext(self, self._ctx, self.state) - self.enterRule(localctx, 34, self.RULE_struct_initialization) + self.enterRule(localctx, 42, self.RULE_struct_initialization) self._la = 0 # Token type try: - self.state = 305 + self.state = 388 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,38,self._ctx) + la_ = self._interp.adaptivePredict(self._input,42,self._ctx) if la_ == 1: self.enterOuterAlt(localctx, 1) - self.state = 281 + self.state = 364 self.match(PFDLParser.STARTS_WITH_UPPER_C_STR) - self.state = 282 + self.state = 365 self.match(PFDLParser.INDENT) - self.state = 283 + self.state = 366 self.json_object() - self.state = 285 + self.state = 368 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 284 + self.state = 367 self.match(PFDLParser.NL) - self.state = 287 + self.state = 370 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==PFDLParser.NL): break - self.state = 289 + self.state = 372 self.match(PFDLParser.DEDENT) pass elif la_ == 2: self.enterOuterAlt(localctx, 2) - self.state = 291 + self.state = 374 self.match(PFDLParser.STARTS_WITH_UPPER_C_STR) - self.state = 295 + self.state = 378 self._errHandler.sync(self) _la = self._input.LA(1) while _la==PFDLParser.NL: - self.state = 292 + self.state = 375 self.match(PFDLParser.NL) - self.state = 297 + self.state = 380 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 298 + self.state = 381 self.json_object() - self.state = 302 + 
self.state = 385 self._errHandler.sync(self) _la = self._input.LA(1) while _la==PFDLParser.NL: - self.state = 299 + self.state = 382 self.match(PFDLParser.NL) - self.state = 304 + self.state = 387 self._errHandler.sync(self) _la = self._input.LA(1) @@ -2178,14 +2727,14 @@ def accept(self, visitor:ParseTreeVisitor): def variable_definition(self): localctx = PFDLParser.Variable_definitionContext(self, self._ctx, self.state) - self.enterRule(localctx, 36, self.RULE_variable_definition) + self.enterRule(localctx, 44, self.RULE_variable_definition) try: self.enterOuterAlt(localctx, 1) - self.state = 307 + self.state = 390 self.match(PFDLParser.STARTS_WITH_LOWER_C_STR) - self.state = 308 + self.state = 391 self.match(PFDLParser.COLON) - self.state = 309 + self.state = 392 self.variable_type() except RecognitionException as re: localctx.exception = re @@ -2234,17 +2783,17 @@ def accept(self, visitor:ParseTreeVisitor): def variable_type(self): localctx = PFDLParser.Variable_typeContext(self, self._ctx, self.state) - self.enterRule(localctx, 38, self.RULE_variable_type) + self.enterRule(localctx, 46, self.RULE_variable_type) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 311 + self.state = 394 self.primitive() - self.state = 313 + self.state = 396 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PFDLParser.ARRAY_LEFT: - self.state = 312 + self.state = 395 self.array() @@ -2276,6 +2825,15 @@ def BOOLEAN_P(self): def STARTS_WITH_UPPER_C_STR(self): return self.getToken(PFDLParser.STARTS_WITH_UPPER_C_STR, 0) + def LOCATION(self): + return self.getToken(PFDLParser.LOCATION, 0) + + def EVENT(self): + return self.getToken(PFDLParser.EVENT, 0) + + def TIME(self): + return self.getToken(PFDLParser.TIME, 0) + def getRuleIndex(self): return PFDLParser.RULE_primitive @@ -2299,13 +2857,13 @@ def accept(self, visitor:ParseTreeVisitor): def primitive(self): localctx = PFDLParser.PrimitiveContext(self, self._ctx, self.state) - 
self.enterRule(localctx, 40, self.RULE_primitive) + self.enterRule(localctx, 48, self.RULE_primitive) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 315 + self.state = 398 _la = self._input.LA(1) - if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PFDLParser.NUMBER_P) | (1 << PFDLParser.STRING_P) | (1 << PFDLParser.BOOLEAN_P) | (1 << PFDLParser.STARTS_WITH_UPPER_C_STR))) != 0)): + if not(((((_la - 20)) & ~0x3f) == 0 and ((1 << (_la - 20)) & ((1 << (PFDLParser.LOCATION - 20)) | (1 << (PFDLParser.EVENT - 20)) | (1 << (PFDLParser.TIME - 20)) | (1 << (PFDLParser.NUMBER_P - 20)) | (1 << (PFDLParser.STRING_P - 20)) | (1 << (PFDLParser.BOOLEAN_P - 20)) | (1 << (PFDLParser.STARTS_WITH_UPPER_C_STR - 20)))) != 0)): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) @@ -2368,34 +2926,142 @@ def accept(self, visitor:ParseTreeVisitor): def attribute_access(self): localctx = PFDLParser.Attribute_accessContext(self, self._ctx, self.state) - self.enterRule(localctx, 42, self.RULE_attribute_access) + self.enterRule(localctx, 50, self.RULE_attribute_access) try: - self.enterOuterAlt(localctx, 1) - self.state = 317 - self.match(PFDLParser.STARTS_WITH_LOWER_C_STR) - self.state = 323 + self.state = 421 self._errHandler.sync(self) - _alt = 1 - while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: - if _alt == 1: - self.state = 318 - self.match(PFDLParser.DOT) - self.state = 319 - self.match(PFDLParser.STARTS_WITH_LOWER_C_STR) - self.state = 321 - self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,40,self._ctx) - if la_ == 1: - self.state = 320 - self.array() + la_ = self._interp.adaptivePredict(self._input,48,self._ctx) + if la_ == 1: + self.enterOuterAlt(localctx, 1) + self.state = 400 + self.match(PFDLParser.STARTS_WITH_LOWER_C_STR) + self.state = 406 + self._errHandler.sync(self) + _alt = 1 + while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: + if _alt == 1: + self.state = 401 + self.match(PFDLParser.DOT) 
+ self.state = 402 + self.match(PFDLParser.STARTS_WITH_LOWER_C_STR) + self.state = 404 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,44,self._ctx) + if la_ == 1: + self.state = 403 + self.array() - else: - raise NoViableAltException(self) - self.state = 325 + else: + raise NoViableAltException(self) + self.state = 408 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,45,self._ctx) + + pass + + elif la_ == 2: + self.enterOuterAlt(localctx, 2) + self.state = 410 + self.match(PFDLParser.STARTS_WITH_LOWER_C_STR) + self.state = 418 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,41,self._ctx) + _alt = self._interp.adaptivePredict(self._input,47,self._ctx) + while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: + if _alt==1: + self.state = 411 + self.match(PFDLParser.DOT) + self.state = 412 + self.match(PFDLParser.STARTS_WITH_LOWER_C_STR) + self.state = 414 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,46,self._ctx) + if la_ == 1: + self.state = 413 + self.array() + + + self.state = 420 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,47,self._ctx) + + pass + + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Attribute_assignmentContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def STARTS_WITH_LOWER_C_STR(self): + return self.getToken(PFDLParser.STARTS_WITH_LOWER_C_STR, 0) + + def COLON(self): + return self.getToken(PFDLParser.COLON, 0) + + def value(self): + return self.getTypedRuleContext(PFDLParser.ValueContext,0) + + + def json_object(self): + return self.getTypedRuleContext(PFDLParser.Json_objectContext,0) + + + def 
getRuleIndex(self): + return PFDLParser.RULE_attribute_assignment + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterAttribute_assignment" ): + listener.enterAttribute_assignment(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitAttribute_assignment" ): + listener.exitAttribute_assignment(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitAttribute_assignment" ): + return visitor.visitAttribute_assignment(self) + else: + return visitor.visitChildren(self) + + + + + def attribute_assignment(self): + + localctx = PFDLParser.Attribute_assignmentContext(self, self._ctx, self.state) + self.enterRule(localctx, 52, self.RULE_attribute_assignment) + try: + self.enterOuterAlt(localctx, 1) + self.state = 423 + self.match(PFDLParser.STARTS_WITH_LOWER_C_STR) + self.state = 424 + self.match(PFDLParser.COLON) + self.state = 427 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [PFDLParser.TRUE, PFDLParser.FALSE, PFDLParser.MINUS, PFDLParser.INTEGER, PFDLParser.FLOAT, PFDLParser.STRING, PFDLParser.STARTS_WITH_LOWER_C_STR]: + self.state = 425 + self.value() + pass + elif token in [PFDLParser.JSON_OPEN, PFDLParser.JSON_OPEN_2]: + self.state = 426 + self.json_object() + pass + else: + raise NoViableAltException(self) except RecognitionException as re: localctx.exception = re @@ -2448,17 +3114,17 @@ def accept(self, visitor:ParseTreeVisitor): def array(self): localctx = PFDLParser.ArrayContext(self, self._ctx, self.state) - self.enterRule(localctx, 44, self.RULE_array) + self.enterRule(localctx, 54, self.RULE_array) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 327 + self.state = 429 self.match(PFDLParser.ARRAY_LEFT) - self.state = 329 + self.state = 431 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PFDLParser.INTEGER or _la==PFDLParser.STARTS_WITH_LOWER_C_STR: - self.state = 328 + self.state = 430 _la = 
self._input.LA(1) if not(_la==PFDLParser.INTEGER or _la==PFDLParser.STARTS_WITH_LOWER_C_STR): self._errHandler.recoverInline(self) @@ -2467,7 +3133,7 @@ def array(self): self.consume() - self.state = 331 + self.state = 433 self.match(PFDLParser.ARRAY_RIGHT) except RecognitionException as re: localctx.exception = re @@ -2517,19 +3183,19 @@ def accept(self, visitor:ParseTreeVisitor): def number(self): localctx = PFDLParser.NumberContext(self, self._ctx, self.state) - self.enterRule(localctx, 46, self.RULE_number) + self.enterRule(localctx, 56, self.RULE_number) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 334 + self.state = 436 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PFDLParser.MINUS: - self.state = 333 + self.state = 435 self.match(PFDLParser.MINUS) - self.state = 336 + self.state = 438 _la = self._input.LA(1) if not(_la==PFDLParser.INTEGER or _la==PFDLParser.FLOAT): self._errHandler.recoverInline(self) @@ -2592,34 +3258,34 @@ def accept(self, visitor:ParseTreeVisitor): def value(self): localctx = PFDLParser.ValueContext(self, self._ctx, self.state) - self.enterRule(localctx, 48, self.RULE_value) + self.enterRule(localctx, 58, self.RULE_value) try: - self.state = 343 + self.state = 445 self._errHandler.sync(self) token = self._input.LA(1) if token in [PFDLParser.TRUE]: self.enterOuterAlt(localctx, 1) - self.state = 338 + self.state = 440 self.match(PFDLParser.TRUE) pass elif token in [PFDLParser.FALSE]: self.enterOuterAlt(localctx, 2) - self.state = 339 + self.state = 441 self.match(PFDLParser.FALSE) pass elif token in [PFDLParser.MINUS, PFDLParser.INTEGER, PFDLParser.FLOAT]: self.enterOuterAlt(localctx, 3) - self.state = 340 + self.state = 442 self.number() pass elif token in [PFDLParser.STRING]: self.enterOuterAlt(localctx, 4) - self.state = 341 + self.state = 443 self.match(PFDLParser.STRING) pass elif token in [PFDLParser.STARTS_WITH_LOWER_C_STR]: self.enterOuterAlt(localctx, 5) - self.state = 342 + self.state 
= 444 self.attribute_access() pass else: @@ -2662,6 +3328,14 @@ def value(self): return self.getTypedRuleContext(PFDLParser.ValueContext,0) + def attribute_access(self): + return self.getTypedRuleContext(PFDLParser.Attribute_accessContext,0) + + + def rule_call(self): + return self.getTypedRuleContext(PFDLParser.Rule_callContext,0) + + def STAR(self): return self.getToken(PFDLParser.STAR, 0) @@ -2708,141 +3382,152 @@ def expression(self, _p:int=0): _parentState = self.state localctx = PFDLParser.ExpressionContext(self, self._ctx, _parentState) _prevctx = localctx - _startState = 50 - self.enterRecursionRule(localctx, 50, self.RULE_expression, _p) + _startState = 60 + self.enterRecursionRule(localctx, 60, self.RULE_expression, _p) try: self.enterOuterAlt(localctx, 1) - self.state = 354 + self.state = 458 self._errHandler.sync(self) - token = self._input.LA(1) - if token in [PFDLParser.LEFT_PARENTHESIS]: - self.state = 346 + la_ = self._interp.adaptivePredict(self._input,53,self._ctx) + if la_ == 1: + self.state = 448 self.match(PFDLParser.LEFT_PARENTHESIS) - self.state = 347 + self.state = 449 self.expression(0) - self.state = 348 + self.state = 450 self.match(PFDLParser.RIGHT_PARENTHESIS) pass - elif token in [PFDLParser.BOOLEAN_NOT]: - self.state = 350 + + elif la_ == 2: + self.state = 452 self.unOperation() - self.state = 351 - self.expression(4) + self.state = 453 + self.expression(6) pass - elif token in [PFDLParser.TRUE, PFDLParser.FALSE, PFDLParser.MINUS, PFDLParser.INTEGER, PFDLParser.FLOAT, PFDLParser.STRING, PFDLParser.STARTS_WITH_LOWER_C_STR]: - self.state = 353 + + elif la_ == 3: + self.state = 455 self.value() pass - else: - raise NoViableAltException(self) + + elif la_ == 4: + self.state = 456 + self.attribute_access() + pass + + elif la_ == 5: + self.state = 457 + self.rule_call() + pass + self._ctx.stop = self._input.LT(-1) - self.state = 380 + self.state = 484 self._errHandler.sync(self) - _alt = 
self._interp.adaptivePredict(self._input,47,self._ctx) + _alt = self._interp.adaptivePredict(self._input,55,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: if self._parseListeners is not None: self.triggerExitRuleEvent() _prevctx = localctx - self.state = 378 + self.state = 482 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,46,self._ctx) + la_ = self._interp.adaptivePredict(self._input,54,self._ctx) if la_ == 1: localctx = PFDLParser.ExpressionContext(self, _parentctx, _parentState) self.pushNewRecursionContext(localctx, _startState, self.RULE_expression) - self.state = 356 - if not self.precpred(self._ctx, 9): + self.state = 460 + if not self.precpred(self._ctx, 11): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 9)") - self.state = 357 + raise FailedPredicateException(self, "self.precpred(self._ctx, 11)") + self.state = 461 self.match(PFDLParser.STAR) - self.state = 358 - self.expression(10) + self.state = 462 + self.expression(12) pass elif la_ == 2: localctx = PFDLParser.ExpressionContext(self, _parentctx, _parentState) self.pushNewRecursionContext(localctx, _startState, self.RULE_expression) - self.state = 359 - if not self.precpred(self._ctx, 8): + self.state = 463 + if not self.precpred(self._ctx, 10): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 8)") - self.state = 360 + raise FailedPredicateException(self, "self.precpred(self._ctx, 10)") + self.state = 464 self.match(PFDLParser.SLASH) - self.state = 361 - self.expression(9) + self.state = 465 + self.expression(11) pass elif la_ == 3: localctx = PFDLParser.ExpressionContext(self, _parentctx, _parentState) self.pushNewRecursionContext(localctx, _startState, self.RULE_expression) - self.state = 362 - if not self.precpred(self._ctx, 7): + self.state = 466 + if not self.precpred(self._ctx, 9): from 
antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 7)") - self.state = 363 + raise FailedPredicateException(self, "self.precpred(self._ctx, 9)") + self.state = 467 self.match(PFDLParser.MINUS) - self.state = 364 - self.expression(8) + self.state = 468 + self.expression(10) pass elif la_ == 4: localctx = PFDLParser.ExpressionContext(self, _parentctx, _parentState) self.pushNewRecursionContext(localctx, _startState, self.RULE_expression) - self.state = 365 - if not self.precpred(self._ctx, 6): + self.state = 469 + if not self.precpred(self._ctx, 8): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 6)") - self.state = 366 + raise FailedPredicateException(self, "self.precpred(self._ctx, 8)") + self.state = 470 self.match(PFDLParser.PLUS) - self.state = 367 - self.expression(7) + self.state = 471 + self.expression(9) pass elif la_ == 5: localctx = PFDLParser.ExpressionContext(self, _parentctx, _parentState) self.pushNewRecursionContext(localctx, _startState, self.RULE_expression) - self.state = 368 - if not self.precpred(self._ctx, 5): + self.state = 472 + if not self.precpred(self._ctx, 7): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 5)") - self.state = 369 + raise FailedPredicateException(self, "self.precpred(self._ctx, 7)") + self.state = 473 self.binOperation() - self.state = 370 - self.expression(6) + self.state = 474 + self.expression(8) pass elif la_ == 6: localctx = PFDLParser.ExpressionContext(self, _parentctx, _parentState) self.pushNewRecursionContext(localctx, _startState, self.RULE_expression) - self.state = 372 - if not self.precpred(self._ctx, 3): + self.state = 476 + if not self.precpred(self._ctx, 5): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 3)") - self.state = 
373 + raise FailedPredicateException(self, "self.precpred(self._ctx, 5)") + self.state = 477 self.match(PFDLParser.BOOLEAN_AND) - self.state = 374 - self.expression(4) + self.state = 478 + self.expression(6) pass elif la_ == 7: localctx = PFDLParser.ExpressionContext(self, _parentctx, _parentState) self.pushNewRecursionContext(localctx, _startState, self.RULE_expression) - self.state = 375 - if not self.precpred(self._ctx, 2): + self.state = 479 + if not self.precpred(self._ctx, 4): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 2)") - self.state = 376 + raise FailedPredicateException(self, "self.precpred(self._ctx, 4)") + self.state = 480 self.match(PFDLParser.BOOLEAN_OR) - self.state = 377 - self.expression(3) + self.state = 481 + self.expression(5) pass - self.state = 382 + self.state = 486 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,47,self._ctx) + _alt = self._interp.adaptivePredict(self._input,55,self._ctx) except RecognitionException as re: localctx.exception = re @@ -2901,11 +3586,11 @@ def accept(self, visitor:ParseTreeVisitor): def binOperation(self): localctx = PFDLParser.BinOperationContext(self, self._ctx, self.state) - self.enterRule(localctx, 52, self.RULE_binOperation) + self.enterRule(localctx, 62, self.RULE_binOperation) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 383 + self.state = 487 _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PFDLParser.LESS_THAN) | (1 << PFDLParser.LESS_THAN_OR_EQUAL) | (1 << PFDLParser.GREATER_THAN) | (1 << PFDLParser.GREATER_THAN_OR_EQUAL) | (1 << PFDLParser.EQUAL) | (1 << PFDLParser.NOT_EQUAL))) != 0)): self._errHandler.recoverInline(self) @@ -2954,10 +3639,10 @@ def accept(self, visitor:ParseTreeVisitor): def unOperation(self): localctx = PFDLParser.UnOperationContext(self, self._ctx, self.state) - self.enterRule(localctx, 54, 
self.RULE_unOperation) + self.enterRule(localctx, 64, self.RULE_unOperation) try: self.enterOuterAlt(localctx, 1) - self.state = 385 + self.state = 489 self.match(PFDLParser.BOOLEAN_NOT) except RecognitionException as re: localctx.exception = re @@ -3018,39 +3703,39 @@ def accept(self, visitor:ParseTreeVisitor): def json_object(self): localctx = PFDLParser.Json_objectContext(self, self._ctx, self.state) - self.enterRule(localctx, 56, self.RULE_json_object) + self.enterRule(localctx, 66, self.RULE_json_object) self._la = 0 # Token type try: - self.state = 401 + self.state = 505 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,49,self._ctx) + la_ = self._interp.adaptivePredict(self._input,57,self._ctx) if la_ == 1: self.enterOuterAlt(localctx, 1) - self.state = 387 + self.state = 491 self.json_open_bracket() - self.state = 388 + self.state = 492 self.pair() - self.state = 393 + self.state = 497 self._errHandler.sync(self) _la = self._input.LA(1) while _la==PFDLParser.JSON_COMMA: - self.state = 389 + self.state = 493 self.match(PFDLParser.JSON_COMMA) - self.state = 390 + self.state = 494 self.pair() - self.state = 395 + self.state = 499 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 396 + self.state = 500 self.match(PFDLParser.JSON_CLOSE) pass elif la_ == 2: self.enterOuterAlt(localctx, 2) - self.state = 398 + self.state = 502 self.json_open_bracket() - self.state = 399 + self.state = 503 self.match(PFDLParser.JSON_CLOSE) pass @@ -3104,14 +3789,14 @@ def accept(self, visitor:ParseTreeVisitor): def pair(self): localctx = PFDLParser.PairContext(self, self._ctx, self.state) - self.enterRule(localctx, 58, self.RULE_pair) + self.enterRule(localctx, 68, self.RULE_pair) try: self.enterOuterAlt(localctx, 1) - self.state = 403 + self.state = 507 self.match(PFDLParser.JSON_STRING) - self.state = 404 + self.state = 508 self.match(PFDLParser.JSON_COLON) - self.state = 405 + self.state = 509 self.json_value() except 
RecognitionException as re: localctx.exception = re @@ -3158,11 +3843,11 @@ def accept(self, visitor:ParseTreeVisitor): def json_open_bracket(self): localctx = PFDLParser.Json_open_bracketContext(self, self._ctx, self.state) - self.enterRule(localctx, 60, self.RULE_json_open_bracket) + self.enterRule(localctx, 70, self.RULE_json_open_bracket) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 407 + self.state = 511 _la = self._input.LA(1) if not(_la==PFDLParser.JSON_OPEN or _la==PFDLParser.JSON_OPEN_2): self._errHandler.recoverInline(self) @@ -3228,39 +3913,39 @@ def accept(self, visitor:ParseTreeVisitor): def json_value(self): localctx = PFDLParser.Json_valueContext(self, self._ctx, self.state) - self.enterRule(localctx, 62, self.RULE_json_value) + self.enterRule(localctx, 72, self.RULE_json_value) try: - self.state = 415 + self.state = 519 self._errHandler.sync(self) token = self._input.LA(1) if token in [PFDLParser.JSON_STRING]: self.enterOuterAlt(localctx, 1) - self.state = 409 + self.state = 513 self.match(PFDLParser.JSON_STRING) pass elif token in [PFDLParser.JSON_TRUE]: self.enterOuterAlt(localctx, 2) - self.state = 410 + self.state = 514 self.match(PFDLParser.JSON_TRUE) pass elif token in [PFDLParser.JSON_FALSE]: self.enterOuterAlt(localctx, 3) - self.state = 411 + self.state = 515 self.match(PFDLParser.JSON_FALSE) pass elif token in [PFDLParser.NUMBER]: self.enterOuterAlt(localctx, 4) - self.state = 412 + self.state = 516 self.match(PFDLParser.NUMBER) pass elif token in [PFDLParser.JSON_OPEN, PFDLParser.JSON_OPEN_2]: self.enterOuterAlt(localctx, 5) - self.state = 413 + self.state = 517 self.json_object() pass elif token in [PFDLParser.JSON_ARRAY_LEFT]: self.enterOuterAlt(localctx, 6) - self.state = 414 + self.state = 518 self.json_array() pass else: @@ -3324,39 +4009,39 @@ def accept(self, visitor:ParseTreeVisitor): def json_array(self): localctx = PFDLParser.Json_arrayContext(self, self._ctx, self.state) - 
self.enterRule(localctx, 64, self.RULE_json_array) + self.enterRule(localctx, 74, self.RULE_json_array) self._la = 0 # Token type try: - self.state = 430 + self.state = 534 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,52,self._ctx) + la_ = self._interp.adaptivePredict(self._input,60,self._ctx) if la_ == 1: self.enterOuterAlt(localctx, 1) - self.state = 417 + self.state = 521 self.match(PFDLParser.JSON_ARRAY_LEFT) - self.state = 418 + self.state = 522 self.json_value() - self.state = 423 + self.state = 527 self._errHandler.sync(self) _la = self._input.LA(1) while _la==PFDLParser.JSON_COMMA: - self.state = 419 + self.state = 523 self.match(PFDLParser.JSON_COMMA) - self.state = 420 + self.state = 524 self.json_value() - self.state = 425 + self.state = 529 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 426 + self.state = 530 self.match(PFDLParser.JSON_ARRAY_RIGHT) pass elif la_ == 2: self.enterOuterAlt(localctx, 2) - self.state = 428 + self.state = 532 self.match(PFDLParser.JSON_ARRAY_LEFT) - self.state = 429 + self.state = 533 self.match(PFDLParser.JSON_ARRAY_RIGHT) pass @@ -3370,44 +4055,1498 @@ def json_array(self): return localctx + class TransportStatementContext(ParserRuleContext): + __slots__ = 'parser' - def sempred(self, localctx:RuleContext, ruleIndex:int, predIndex:int): - if self._predicates == None: - self._predicates = dict() - self._predicates[25] = self.expression_sempred - pred = self._predicates.get(ruleIndex, None) - if pred is None: - raise Exception("No predicate with index:" + str(ruleIndex)) - else: - return pred(localctx, predIndex) + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser - def expression_sempred(self, localctx:ExpressionContext, predIndex:int): - if predIndex == 0: - return self.precpred(self._ctx, 9) - + def TRANSPORT(self): + return self.getToken(PFDLParser.TRANSPORT, 0) - if predIndex 
== 1: - return self.precpred(self._ctx, 8) - + def NL(self): + return self.getToken(PFDLParser.NL, 0) - if predIndex == 2: - return self.precpred(self._ctx, 7) - + def FROM(self): + return self.getToken(PFDLParser.FROM, 0) - if predIndex == 3: - return self.precpred(self._ctx, 6) - + def tosCollectionStatement(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(PFDLParser.TosCollectionStatementContext) + else: + return self.getTypedRuleContext(PFDLParser.TosCollectionStatementContext,i) - if predIndex == 4: - return self.precpred(self._ctx, 5) - - if predIndex == 5: - return self.precpred(self._ctx, 3) + def TO(self): + return self.getToken(PFDLParser.TO, 0) + + def getRuleIndex(self): + return PFDLParser.RULE_transportStatement + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterTransportStatement" ): + listener.enterTransportStatement(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitTransportStatement" ): + listener.exitTransportStatement(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitTransportStatement" ): + return visitor.visitTransportStatement(self) + else: + return visitor.visitChildren(self) + + + + + def transportStatement(self): + + localctx = PFDLParser.TransportStatementContext(self, self._ctx, self.state) + self.enterRule(localctx, 76, self.RULE_transportStatement) + try: + self.enterOuterAlt(localctx, 1) + self.state = 536 + self.match(PFDLParser.TRANSPORT) + self.state = 537 + self.match(PFDLParser.NL) + self.state = 538 + self.match(PFDLParser.FROM) + self.state = 539 + self.tosCollectionStatement() + self.state = 540 + self.match(PFDLParser.TO) + self.state = 541 + self.tosCollectionStatement() + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class 
TosCollectionStatementContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def STARTS_WITH_LOWER_C_STR(self, i:int=None): + if i is None: + return self.getTokens(PFDLParser.STARTS_WITH_LOWER_C_STR) + else: + return self.getToken(PFDLParser.STARTS_WITH_LOWER_C_STR, i) + + def NL(self): + return self.getToken(PFDLParser.NL, 0) + + def COMMA(self, i:int=None): + if i is None: + return self.getTokens(PFDLParser.COMMA) + else: + return self.getToken(PFDLParser.COMMA, i) + + def getRuleIndex(self): + return PFDLParser.RULE_tosCollectionStatement + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterTosCollectionStatement" ): + listener.enterTosCollectionStatement(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitTosCollectionStatement" ): + listener.exitTosCollectionStatement(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitTosCollectionStatement" ): + return visitor.visitTosCollectionStatement(self) + else: + return visitor.visitChildren(self) + + + + + def tosCollectionStatement(self): + + localctx = PFDLParser.TosCollectionStatementContext(self, self._ctx, self.state) + self.enterRule(localctx, 78, self.RULE_tosCollectionStatement) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 543 + self.match(PFDLParser.STARTS_WITH_LOWER_C_STR) + self.state = 548 + self._errHandler.sync(self) + _la = self._input.LA(1) + while _la==PFDLParser.COMMA: + self.state = 544 + self.match(PFDLParser.COMMA) + self.state = 545 + self.match(PFDLParser.STARTS_WITH_LOWER_C_STR) + self.state = 550 + self._errHandler.sync(self) + _la = self._input.LA(1) + + self.state = 551 + self.match(PFDLParser.NL) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + 
self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class MoveStatementContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def MOVE(self): + return self.getToken(PFDLParser.MOVE, 0) + + def NL(self, i:int=None): + if i is None: + return self.getTokens(PFDLParser.NL) + else: + return self.getToken(PFDLParser.NL, i) + + def TO(self): + return self.getToken(PFDLParser.TO, 0) + + def STARTS_WITH_LOWER_C_STR(self): + return self.getToken(PFDLParser.STARTS_WITH_LOWER_C_STR, 0) + + def getRuleIndex(self): + return PFDLParser.RULE_moveStatement + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterMoveStatement" ): + listener.enterMoveStatement(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitMoveStatement" ): + listener.exitMoveStatement(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitMoveStatement" ): + return visitor.visitMoveStatement(self) + else: + return visitor.visitChildren(self) + + + + + def moveStatement(self): + + localctx = PFDLParser.MoveStatementContext(self, self._ctx, self.state) + self.enterRule(localctx, 80, self.RULE_moveStatement) + try: + self.enterOuterAlt(localctx, 1) + self.state = 553 + self.match(PFDLParser.MOVE) + self.state = 554 + self.match(PFDLParser.NL) + self.state = 555 + self.match(PFDLParser.TO) + self.state = 556 + self.match(PFDLParser.STARTS_WITH_LOWER_C_STR) + self.state = 557 + self.match(PFDLParser.NL) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class ActionStatementContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, 
invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def ACTION(self): + return self.getToken(PFDLParser.ACTION, 0) + + def NL(self, i:int=None): + if i is None: + return self.getTokens(PFDLParser.NL) + else: + return self.getToken(PFDLParser.NL, i) + + def DO(self): + return self.getToken(PFDLParser.DO, 0) + + def STARTS_WITH_LOWER_C_STR(self): + return self.getToken(PFDLParser.STARTS_WITH_LOWER_C_STR, 0) + + def getRuleIndex(self): + return PFDLParser.RULE_actionStatement + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterActionStatement" ): + listener.enterActionStatement(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitActionStatement" ): + listener.exitActionStatement(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitActionStatement" ): + return visitor.visitActionStatement(self) + else: + return visitor.visitChildren(self) + + + + + def actionStatement(self): + + localctx = PFDLParser.ActionStatementContext(self, self._ctx, self.state) + self.enterRule(localctx, 82, self.RULE_actionStatement) + try: + self.enterOuterAlt(localctx, 1) + self.state = 559 + self.match(PFDLParser.ACTION) + self.state = 560 + self.match(PFDLParser.NL) + self.state = 561 + self.match(PFDLParser.DO) + self.state = 562 + self.match(PFDLParser.STARTS_WITH_LOWER_C_STR) + self.state = 563 + self.match(PFDLParser.NL) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class ConstraintStatementContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def CONSTRAINTS(self): + return self.getToken(PFDLParser.CONSTRAINTS, 0) + + def NL(self): + return 
self.getToken(PFDLParser.NL, 0) + + def expression(self): + return self.getTypedRuleContext(PFDLParser.ExpressionContext,0) + + + def json_object(self): + return self.getTypedRuleContext(PFDLParser.Json_objectContext,0) + + + def getRuleIndex(self): + return PFDLParser.RULE_constraintStatement + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterConstraintStatement" ): + listener.enterConstraintStatement(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitConstraintStatement" ): + listener.exitConstraintStatement(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitConstraintStatement" ): + return visitor.visitConstraintStatement(self) + else: + return visitor.visitChildren(self) + + + + + def constraintStatement(self): + + localctx = PFDLParser.ConstraintStatementContext(self, self._ctx, self.state) + self.enterRule(localctx, 84, self.RULE_constraintStatement) + try: + self.enterOuterAlt(localctx, 1) + self.state = 565 + self.match(PFDLParser.CONSTRAINTS) + self.state = 568 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [PFDLParser.TRUE, PFDLParser.FALSE, PFDLParser.LEFT_PARENTHESIS, PFDLParser.BOOLEAN_NOT, PFDLParser.MINUS, PFDLParser.INTEGER, PFDLParser.FLOAT, PFDLParser.STRING, PFDLParser.STARTS_WITH_LOWER_C_STR]: + self.state = 566 + self.expression(0) + pass + elif token in [PFDLParser.JSON_OPEN, PFDLParser.JSON_OPEN_2]: + self.state = 567 + self.json_object() + pass + else: + raise NoViableAltException(self) + + self.state = 570 + self.match(PFDLParser.NL) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class MoveOrderStepContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) 
+ self.parser = parser + + def MOVE_ORDER_STEP(self): + return self.getToken(PFDLParser.MOVE_ORDER_STEP, 0) + + def STARTS_WITH_LOWER_C_STR(self): + return self.getToken(PFDLParser.STARTS_WITH_LOWER_C_STR, 0) + + def INDENT(self): + return self.getToken(PFDLParser.INDENT, 0) + + def DEDENT(self): + return self.getToken(PFDLParser.DEDENT, 0) + + def END(self): + return self.getToken(PFDLParser.END, 0) + + def mosStatement(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(PFDLParser.MosStatementContext) + else: + return self.getTypedRuleContext(PFDLParser.MosStatementContext,i) + + + def getRuleIndex(self): + return PFDLParser.RULE_moveOrderStep + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterMoveOrderStep" ): + listener.enterMoveOrderStep(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitMoveOrderStep" ): + listener.exitMoveOrderStep(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitMoveOrderStep" ): + return visitor.visitMoveOrderStep(self) + else: + return visitor.visitChildren(self) + + + + + def moveOrderStep(self): + + localctx = PFDLParser.MoveOrderStepContext(self, self._ctx, self.state) + self.enterRule(localctx, 86, self.RULE_moveOrderStep) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 572 + self.match(PFDLParser.MOVE_ORDER_STEP) + self.state = 573 + self.match(PFDLParser.STARTS_WITH_LOWER_C_STR) + self.state = 574 + self.match(PFDLParser.INDENT) + self.state = 576 + self._errHandler.sync(self) + _la = self._input.LA(1) + while True: + self.state = 575 + self.mosStatement() + self.state = 578 + self._errHandler.sync(self) + _la = self._input.LA(1) + if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PFDLParser.ON_DONE) | (1 << PFDLParser.STARTED_BY) | (1 << PFDLParser.FINISHED_BY) | (1 << PFDLParser.LOCATION))) != 0)): + break + + self.state = 580 + self.match(PFDLParser.DEDENT) + self.state = 
581 + self.match(PFDLParser.END) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class MosStatementContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def locationStatement(self): + return self.getTypedRuleContext(PFDLParser.LocationStatementContext,0) + + + def eventStatement(self): + return self.getTypedRuleContext(PFDLParser.EventStatementContext,0) + + + def onDoneStatement(self): + return self.getTypedRuleContext(PFDLParser.OnDoneStatementContext,0) + + + def getRuleIndex(self): + return PFDLParser.RULE_mosStatement + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterMosStatement" ): + listener.enterMosStatement(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitMosStatement" ): + listener.exitMosStatement(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitMosStatement" ): + return visitor.visitMosStatement(self) + else: + return visitor.visitChildren(self) + + + + + def mosStatement(self): + + localctx = PFDLParser.MosStatementContext(self, self._ctx, self.state) + self.enterRule(localctx, 88, self.RULE_mosStatement) + try: + self.state = 586 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [PFDLParser.LOCATION]: + self.enterOuterAlt(localctx, 1) + self.state = 583 + self.locationStatement() + pass + elif token in [PFDLParser.STARTED_BY, PFDLParser.FINISHED_BY]: + self.enterOuterAlt(localctx, 2) + self.state = 584 + self.eventStatement() + pass + elif token in [PFDLParser.ON_DONE]: + self.enterOuterAlt(localctx, 3) + self.state = 585 + self.onDoneStatement() + pass + else: + raise NoViableAltException(self) + + except RecognitionException as re: 
+ localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class ActionOrderStepContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def ACTION_ORDER_STEP(self): + return self.getToken(PFDLParser.ACTION_ORDER_STEP, 0) + + def STARTS_WITH_LOWER_C_STR(self): + return self.getToken(PFDLParser.STARTS_WITH_LOWER_C_STR, 0) + + def INDENT(self): + return self.getToken(PFDLParser.INDENT, 0) + + def DEDENT(self): + return self.getToken(PFDLParser.DEDENT, 0) + + def END(self): + return self.getToken(PFDLParser.END, 0) + + def aosStatement(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(PFDLParser.AosStatementContext) + else: + return self.getTypedRuleContext(PFDLParser.AosStatementContext,i) + + + def getRuleIndex(self): + return PFDLParser.RULE_actionOrderStep + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterActionOrderStep" ): + listener.enterActionOrderStep(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitActionOrderStep" ): + listener.exitActionOrderStep(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitActionOrderStep" ): + return visitor.visitActionOrderStep(self) + else: + return visitor.visitChildren(self) + + + + + def actionOrderStep(self): + + localctx = PFDLParser.ActionOrderStepContext(self, self._ctx, self.state) + self.enterRule(localctx, 90, self.RULE_actionOrderStep) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 588 + self.match(PFDLParser.ACTION_ORDER_STEP) + self.state = 589 + self.match(PFDLParser.STARTS_WITH_LOWER_C_STR) + self.state = 590 + self.match(PFDLParser.INDENT) + self.state = 592 + self._errHandler.sync(self) + _la = self._input.LA(1) + 
while True: + self.state = 591 + self.aosStatement() + self.state = 594 + self._errHandler.sync(self) + _la = self._input.LA(1) + if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PFDLParser.ON_DONE) | (1 << PFDLParser.PARAMETERS) | (1 << PFDLParser.STARTED_BY) | (1 << PFDLParser.FINISHED_BY))) != 0)): + break + + self.state = 596 + self.match(PFDLParser.DEDENT) + self.state = 597 + self.match(PFDLParser.END) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class AosStatementContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def parameterStatement(self): + return self.getTypedRuleContext(PFDLParser.ParameterStatementContext,0) + + + def eventStatement(self): + return self.getTypedRuleContext(PFDLParser.EventStatementContext,0) + + + def onDoneStatement(self): + return self.getTypedRuleContext(PFDLParser.OnDoneStatementContext,0) + + + def getRuleIndex(self): + return PFDLParser.RULE_aosStatement + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterAosStatement" ): + listener.enterAosStatement(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitAosStatement" ): + listener.exitAosStatement(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitAosStatement" ): + return visitor.visitAosStatement(self) + else: + return visitor.visitChildren(self) + + + + + def aosStatement(self): + + localctx = PFDLParser.AosStatementContext(self, self._ctx, self.state) + self.enterRule(localctx, 92, self.RULE_aosStatement) + try: + self.state = 602 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [PFDLParser.PARAMETERS]: + self.enterOuterAlt(localctx, 1) + self.state = 599 
+ self.parameterStatement() + pass + elif token in [PFDLParser.STARTED_BY, PFDLParser.FINISHED_BY]: + self.enterOuterAlt(localctx, 2) + self.state = 600 + self.eventStatement() + pass + elif token in [PFDLParser.ON_DONE]: + self.enterOuterAlt(localctx, 3) + self.state = 601 + self.onDoneStatement() + pass + else: + raise NoViableAltException(self) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class OrderStepContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def transportOrderStep(self): + return self.getTypedRuleContext(PFDLParser.TransportOrderStepContext,0) + + + def moveOrderStep(self): + return self.getTypedRuleContext(PFDLParser.MoveOrderStepContext,0) + + + def actionOrderStep(self): + return self.getTypedRuleContext(PFDLParser.ActionOrderStepContext,0) + + + def getRuleIndex(self): + return PFDLParser.RULE_orderStep + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterOrderStep" ): + listener.enterOrderStep(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitOrderStep" ): + listener.exitOrderStep(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitOrderStep" ): + return visitor.visitOrderStep(self) + else: + return visitor.visitChildren(self) + + + + + def orderStep(self): + + localctx = PFDLParser.OrderStepContext(self, self._ctx, self.state) + self.enterRule(localctx, 94, self.RULE_orderStep) + try: + self.state = 607 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [PFDLParser.TRANSPORT_ORDER_STEP]: + self.enterOuterAlt(localctx, 1) + self.state = 604 + self.transportOrderStep() + pass + elif token in [PFDLParser.MOVE_ORDER_STEP]: + 
self.enterOuterAlt(localctx, 2) + self.state = 605 + self.moveOrderStep() + pass + elif token in [PFDLParser.ACTION_ORDER_STEP]: + self.enterOuterAlt(localctx, 3) + self.state = 606 + self.actionOrderStep() + pass + else: + raise NoViableAltException(self) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class TransportOrderStepContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def TRANSPORT_ORDER_STEP(self): + return self.getToken(PFDLParser.TRANSPORT_ORDER_STEP, 0) + + def STARTS_WITH_LOWER_C_STR(self): + return self.getToken(PFDLParser.STARTS_WITH_LOWER_C_STR, 0) + + def INDENT(self): + return self.getToken(PFDLParser.INDENT, 0) + + def DEDENT(self): + return self.getToken(PFDLParser.DEDENT, 0) + + def END(self): + return self.getToken(PFDLParser.END, 0) + + def tosStatement(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(PFDLParser.TosStatementContext) + else: + return self.getTypedRuleContext(PFDLParser.TosStatementContext,i) + + + def getRuleIndex(self): + return PFDLParser.RULE_transportOrderStep + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterTransportOrderStep" ): + listener.enterTransportOrderStep(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitTransportOrderStep" ): + listener.exitTransportOrderStep(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitTransportOrderStep" ): + return visitor.visitTransportOrderStep(self) + else: + return visitor.visitChildren(self) + + + + + def transportOrderStep(self): + + localctx = PFDLParser.TransportOrderStepContext(self, self._ctx, self.state) + self.enterRule(localctx, 96, 
self.RULE_transportOrderStep) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 609 + self.match(PFDLParser.TRANSPORT_ORDER_STEP) + self.state = 610 + self.match(PFDLParser.STARTS_WITH_LOWER_C_STR) + self.state = 611 + self.match(PFDLParser.INDENT) + self.state = 613 + self._errHandler.sync(self) + _la = self._input.LA(1) + while True: + self.state = 612 + self.tosStatement() + self.state = 615 + self._errHandler.sync(self) + _la = self._input.LA(1) + if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PFDLParser.ON_DONE) | (1 << PFDLParser.PARAMETERS) | (1 << PFDLParser.STARTED_BY) | (1 << PFDLParser.FINISHED_BY) | (1 << PFDLParser.LOCATION))) != 0)): + break + + self.state = 617 + self.match(PFDLParser.DEDENT) + self.state = 618 + self.match(PFDLParser.END) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class TosStatementContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def locationStatement(self): + return self.getTypedRuleContext(PFDLParser.LocationStatementContext,0) + + + def parameterStatement(self): + return self.getTypedRuleContext(PFDLParser.ParameterStatementContext,0) + + + def eventStatement(self): + return self.getTypedRuleContext(PFDLParser.EventStatementContext,0) + + + def onDoneStatement(self): + return self.getTypedRuleContext(PFDLParser.OnDoneStatementContext,0) + + + def getRuleIndex(self): + return PFDLParser.RULE_tosStatement + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterTosStatement" ): + listener.enterTosStatement(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitTosStatement" ): + listener.exitTosStatement(self) + + def accept(self, 
visitor:ParseTreeVisitor): + if hasattr( visitor, "visitTosStatement" ): + return visitor.visitTosStatement(self) + else: + return visitor.visitChildren(self) + + + + + def tosStatement(self): + + localctx = PFDLParser.TosStatementContext(self, self._ctx, self.state) + self.enterRule(localctx, 98, self.RULE_tosStatement) + try: + self.state = 624 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [PFDLParser.LOCATION]: + self.enterOuterAlt(localctx, 1) + self.state = 620 + self.locationStatement() + pass + elif token in [PFDLParser.PARAMETERS]: + self.enterOuterAlt(localctx, 2) + self.state = 621 + self.parameterStatement() + pass + elif token in [PFDLParser.STARTED_BY, PFDLParser.FINISHED_BY]: + self.enterOuterAlt(localctx, 3) + self.state = 622 + self.eventStatement() + pass + elif token in [PFDLParser.ON_DONE]: + self.enterOuterAlt(localctx, 4) + self.state = 623 + self.onDoneStatement() + pass + else: + raise NoViableAltException(self) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class LocationStatementContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def LOCATION(self): + return self.getToken(PFDLParser.LOCATION, 0) + + def STARTS_WITH_LOWER_C_STR(self): + return self.getToken(PFDLParser.STARTS_WITH_LOWER_C_STR, 0) + + def NL(self): + return self.getToken(PFDLParser.NL, 0) + + def getRuleIndex(self): + return PFDLParser.RULE_locationStatement + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterLocationStatement" ): + listener.enterLocationStatement(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitLocationStatement" ): + listener.exitLocationStatement(self) + + def 
accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitLocationStatement" ): + return visitor.visitLocationStatement(self) + else: + return visitor.visitChildren(self) + + + + + def locationStatement(self): + + localctx = PFDLParser.LocationStatementContext(self, self._ctx, self.state) + self.enterRule(localctx, 100, self.RULE_locationStatement) + try: + self.enterOuterAlt(localctx, 1) + self.state = 626 + self.match(PFDLParser.LOCATION) + self.state = 627 + self.match(PFDLParser.STARTS_WITH_LOWER_C_STR) + self.state = 628 + self.match(PFDLParser.NL) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class ParameterStatementContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def PARAMETERS(self): + return self.getToken(PFDLParser.PARAMETERS, 0) + + def NL(self): + return self.getToken(PFDLParser.NL, 0) + + def value(self): + return self.getTypedRuleContext(PFDLParser.ValueContext,0) + + + def json_object(self): + return self.getTypedRuleContext(PFDLParser.Json_objectContext,0) + + + def getRuleIndex(self): + return PFDLParser.RULE_parameterStatement + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterParameterStatement" ): + listener.enterParameterStatement(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitParameterStatement" ): + listener.exitParameterStatement(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitParameterStatement" ): + return visitor.visitParameterStatement(self) + else: + return visitor.visitChildren(self) + + + + + def parameterStatement(self): + + localctx = PFDLParser.ParameterStatementContext(self, self._ctx, self.state) + 
self.enterRule(localctx, 102, self.RULE_parameterStatement) + try: + self.enterOuterAlt(localctx, 1) + self.state = 630 + self.match(PFDLParser.PARAMETERS) + self.state = 633 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [PFDLParser.TRUE, PFDLParser.FALSE, PFDLParser.MINUS, PFDLParser.INTEGER, PFDLParser.FLOAT, PFDLParser.STRING, PFDLParser.STARTS_WITH_LOWER_C_STR]: + self.state = 631 + self.value() + pass + elif token in [PFDLParser.JSON_OPEN, PFDLParser.JSON_OPEN_2]: + self.state = 632 + self.json_object() + pass + else: + raise NoViableAltException(self) + + self.state = 635 + self.match(PFDLParser.NL) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class EventStatementContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def STARTED_BY(self): + return self.getToken(PFDLParser.STARTED_BY, 0) + + def expression(self): + return self.getTypedRuleContext(PFDLParser.ExpressionContext,0) + + + def NL(self): + return self.getToken(PFDLParser.NL, 0) + + def FINISHED_BY(self): + return self.getToken(PFDLParser.FINISHED_BY, 0) + + def getRuleIndex(self): + return PFDLParser.RULE_eventStatement + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterEventStatement" ): + listener.enterEventStatement(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitEventStatement" ): + listener.exitEventStatement(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitEventStatement" ): + return visitor.visitEventStatement(self) + else: + return visitor.visitChildren(self) + + + + + def eventStatement(self): + + localctx = PFDLParser.EventStatementContext(self, self._ctx, self.state) 
+ self.enterRule(localctx, 104, self.RULE_eventStatement) + try: + self.state = 645 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [PFDLParser.STARTED_BY]: + self.enterOuterAlt(localctx, 1) + self.state = 637 + self.match(PFDLParser.STARTED_BY) + self.state = 638 + self.expression(0) + self.state = 639 + self.match(PFDLParser.NL) + pass + elif token in [PFDLParser.FINISHED_BY]: + self.enterOuterAlt(localctx, 2) + self.state = 641 + self.match(PFDLParser.FINISHED_BY) + self.state = 642 + self.expression(0) + self.state = 643 + self.match(PFDLParser.NL) + pass + else: + raise NoViableAltException(self) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class OnDoneStatementContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def ON_DONE(self): + return self.getToken(PFDLParser.ON_DONE, 0) + + def STARTS_WITH_LOWER_C_STR(self): + return self.getToken(PFDLParser.STARTS_WITH_LOWER_C_STR, 0) + + def NL(self): + return self.getToken(PFDLParser.NL, 0) + + def getRuleIndex(self): + return PFDLParser.RULE_onDoneStatement + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterOnDoneStatement" ): + listener.enterOnDoneStatement(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitOnDoneStatement" ): + listener.exitOnDoneStatement(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitOnDoneStatement" ): + return visitor.visitOnDoneStatement(self) + else: + return visitor.visitChildren(self) + + + + + def onDoneStatement(self): + + localctx = PFDLParser.OnDoneStatementContext(self, self._ctx, self.state) + self.enterRule(localctx, 106, self.RULE_onDoneStatement) + try: + 
self.enterOuterAlt(localctx, 1) + self.state = 647 + self.match(PFDLParser.ON_DONE) + self.state = 648 + self.match(PFDLParser.STARTS_WITH_LOWER_C_STR) + self.state = 649 + self.match(PFDLParser.NL) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Rule_Context(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def RULE(self): + return self.getToken(PFDLParser.RULE, 0) + + def rule_call(self): + return self.getTypedRuleContext(PFDLParser.Rule_callContext,0) + + + def INDENT(self): + return self.getToken(PFDLParser.INDENT, 0) + + def DEDENT(self): + return self.getToken(PFDLParser.DEDENT, 0) + + def END(self): + return self.getToken(PFDLParser.END, 0) + + def expression(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(PFDLParser.ExpressionContext) + else: + return self.getTypedRuleContext(PFDLParser.ExpressionContext,i) + + + def NL(self, i:int=None): + if i is None: + return self.getTokens(PFDLParser.NL) + else: + return self.getToken(PFDLParser.NL, i) + + def getRuleIndex(self): + return PFDLParser.RULE_rule_ + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterRule_" ): + listener.enterRule_(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitRule_" ): + listener.exitRule_(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitRule_" ): + return visitor.visitRule_(self) + else: + return visitor.visitChildren(self) + + + + + def rule_(self): + + localctx = PFDLParser.Rule_Context(self, self._ctx, self.state) + self.enterRule(localctx, 108, self.RULE_rule_) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 651 + 
self.match(PFDLParser.RULE) + self.state = 652 + self.rule_call() + self.state = 653 + self.match(PFDLParser.INDENT) + self.state = 657 + self._errHandler.sync(self) + _la = self._input.LA(1) + while True: + self.state = 654 + self.expression(0) + self.state = 655 + self.match(PFDLParser.NL) + self.state = 659 + self._errHandler.sync(self) + _la = self._input.LA(1) + if not (((((_la - 39)) & ~0x3f) == 0 and ((1 << (_la - 39)) & ((1 << (PFDLParser.TRUE - 39)) | (1 << (PFDLParser.FALSE - 39)) | (1 << (PFDLParser.LEFT_PARENTHESIS - 39)) | (1 << (PFDLParser.BOOLEAN_NOT - 39)) | (1 << (PFDLParser.MINUS - 39)) | (1 << (PFDLParser.INTEGER - 39)) | (1 << (PFDLParser.FLOAT - 39)) | (1 << (PFDLParser.STRING - 39)) | (1 << (PFDLParser.STARTS_WITH_LOWER_C_STR - 39)))) != 0)): + break + + self.state = 661 + self.match(PFDLParser.DEDENT) + self.state = 662 + self.match(PFDLParser.END) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Rule_callContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def STARTS_WITH_LOWER_C_STR(self): + return self.getToken(PFDLParser.STARTS_WITH_LOWER_C_STR, 0) + + def LEFT_PARENTHESIS(self): + return self.getToken(PFDLParser.LEFT_PARENTHESIS, 0) + + def RIGHT_PARENTHESIS(self): + return self.getToken(PFDLParser.RIGHT_PARENTHESIS, 0) + + def rule_parameter(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(PFDLParser.Rule_parameterContext) + else: + return self.getTypedRuleContext(PFDLParser.Rule_parameterContext,i) + + + def COMMA(self, i:int=None): + if i is None: + return self.getTokens(PFDLParser.COMMA) + else: + return self.getToken(PFDLParser.COMMA, i) + + def getRuleIndex(self): + return PFDLParser.RULE_rule_call + + def 
enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterRule_call" ): + listener.enterRule_call(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitRule_call" ): + listener.exitRule_call(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitRule_call" ): + return visitor.visitRule_call(self) + else: + return visitor.visitChildren(self) + + + + + def rule_call(self): + + localctx = PFDLParser.Rule_callContext(self, self._ctx, self.state) + self.enterRule(localctx, 110, self.RULE_rule_call) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 664 + self.match(PFDLParser.STARTS_WITH_LOWER_C_STR) + self.state = 665 + self.match(PFDLParser.LEFT_PARENTHESIS) + self.state = 674 + self._errHandler.sync(self) + _la = self._input.LA(1) + if ((((_la - 39)) & ~0x3f) == 0 and ((1 << (_la - 39)) & ((1 << (PFDLParser.TRUE - 39)) | (1 << (PFDLParser.FALSE - 39)) | (1 << (PFDLParser.MINUS - 39)) | (1 << (PFDLParser.INTEGER - 39)) | (1 << (PFDLParser.FLOAT - 39)) | (1 << (PFDLParser.STRING - 39)) | (1 << (PFDLParser.STARTS_WITH_LOWER_C_STR - 39)))) != 0): + self.state = 666 + self.rule_parameter() + self.state = 671 + self._errHandler.sync(self) + _la = self._input.LA(1) + while _la==PFDLParser.COMMA: + self.state = 667 + self.match(PFDLParser.COMMA) + self.state = 668 + self.rule_parameter() + self.state = 673 + self._errHandler.sync(self) + _la = self._input.LA(1) + + + + self.state = 676 + self.match(PFDLParser.RIGHT_PARENTHESIS) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Rule_parameterContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def 
STARTS_WITH_LOWER_C_STR(self): + return self.getToken(PFDLParser.STARTS_WITH_LOWER_C_STR, 0) + + def value(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(PFDLParser.ValueContext) + else: + return self.getTypedRuleContext(PFDLParser.ValueContext,i) + + + def ASSIGNMENT(self): + return self.getToken(PFDLParser.ASSIGNMENT, 0) + + def getRuleIndex(self): + return PFDLParser.RULE_rule_parameter + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterRule_parameter" ): + listener.enterRule_parameter(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitRule_parameter" ): + listener.exitRule_parameter(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitRule_parameter" ): + return visitor.visitRule_parameter(self) + else: + return visitor.visitChildren(self) + + + + + def rule_parameter(self): + + localctx = PFDLParser.Rule_parameterContext(self, self._ctx, self.state) + self.enterRule(localctx, 112, self.RULE_rule_parameter) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 680 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,75,self._ctx) + if la_ == 1: + self.state = 678 + self.match(PFDLParser.STARTS_WITH_LOWER_C_STR) + pass + + elif la_ == 2: + self.state = 679 + self.value() + pass + + + self.state = 684 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==PFDLParser.ASSIGNMENT: + self.state = 682 + self.match(PFDLParser.ASSIGNMENT) + self.state = 683 + self.value() + + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + + def sempred(self, localctx:RuleContext, ruleIndex:int, predIndex:int): + if self._predicates == None: + self._predicates = dict() + self._predicates[30] = self.expression_sempred + pred = self._predicates.get(ruleIndex, 
None) + if pred is None: + raise Exception("No predicate with index:" + str(ruleIndex)) + else: + return pred(localctx, predIndex) + + def expression_sempred(self, localctx:ExpressionContext, predIndex:int): + if predIndex == 0: + return self.precpred(self._ctx, 11) + + + if predIndex == 1: + return self.precpred(self._ctx, 10) + + + if predIndex == 2: + return self.precpred(self._ctx, 9) + + + if predIndex == 3: + return self.precpred(self._ctx, 8) + + + if predIndex == 4: + return self.precpred(self._ctx, 7) + + + if predIndex == 5: + return self.precpred(self._ctx, 5) if predIndex == 6: - return self.precpred(self._ctx, 2) + return self.precpred(self._ctx, 4) diff --git a/pfdl_scheduler/parser/PFDLParserVisitor.py b/pfdl_scheduler/parser/PFDLParserVisitor.py index 321dc46..d1837c1 100644 --- a/pfdl_scheduler/parser/PFDLParserVisitor.py +++ b/pfdl_scheduler/parser/PFDLParserVisitor.py @@ -1,4 +1,4 @@ -# Generated from PFDLParser.g4 by ANTLR 4.9.3 +# Generated from temp/PFDLParser.g4 by ANTLR 4.9.3 from antlr4 import * if __name__ is not None and "." in __name__: from .PFDLParser import PFDLParser @@ -14,16 +14,31 @@ def visitProgram(self, ctx:PFDLParser.ProgramContext): return self.visitChildren(ctx) + # Visit a parse tree produced by PFDLParser#program_statement. + def visitProgram_statement(self, ctx:PFDLParser.Program_statementContext): + return self.visitChildren(ctx) + + # Visit a parse tree produced by PFDLParser#struct. def visitStruct(self, ctx:PFDLParser.StructContext): return self.visitChildren(ctx) + # Visit a parse tree produced by PFDLParser#struct_id. + def visitStruct_id(self, ctx:PFDLParser.Struct_idContext): + return self.visitChildren(ctx) + + # Visit a parse tree produced by PFDLParser#task. def visitTask(self, ctx:PFDLParser.TaskContext): return self.visitChildren(ctx) + # Visit a parse tree produced by PFDLParser#instance. 
+ def visitInstance(self, ctx:PFDLParser.InstanceContext): + return self.visitChildren(ctx) + + # Visit a parse tree produced by PFDLParser#task_in. def visitTask_in(self, ctx:PFDLParser.Task_inContext): return self.visitChildren(ctx) @@ -34,6 +49,11 @@ def visitTask_out(self, ctx:PFDLParser.Task_outContext): return self.visitChildren(ctx) + # Visit a parse tree produced by PFDLParser#taskStatement. + def visitTaskStatement(self, ctx:PFDLParser.TaskStatementContext): + return self.visitChildren(ctx) + + # Visit a parse tree produced by PFDLParser#statement. def visitStatement(self, ctx:PFDLParser.StatementContext): return self.visitChildren(ctx) @@ -119,6 +139,11 @@ def visitAttribute_access(self, ctx:PFDLParser.Attribute_accessContext): return self.visitChildren(ctx) + # Visit a parse tree produced by PFDLParser#attribute_assignment. + def visitAttribute_assignment(self, ctx:PFDLParser.Attribute_assignmentContext): + return self.visitChildren(ctx) + + # Visit a parse tree produced by PFDLParser#array. def visitArray(self, ctx:PFDLParser.ArrayContext): return self.visitChildren(ctx) @@ -174,5 +199,100 @@ def visitJson_array(self, ctx:PFDLParser.Json_arrayContext): return self.visitChildren(ctx) + # Visit a parse tree produced by PFDLParser#transportStatement. + def visitTransportStatement(self, ctx:PFDLParser.TransportStatementContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by PFDLParser#tosCollectionStatement. + def visitTosCollectionStatement(self, ctx:PFDLParser.TosCollectionStatementContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by PFDLParser#moveStatement. + def visitMoveStatement(self, ctx:PFDLParser.MoveStatementContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by PFDLParser#actionStatement. + def visitActionStatement(self, ctx:PFDLParser.ActionStatementContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by PFDLParser#constraintStatement. 
+ def visitConstraintStatement(self, ctx:PFDLParser.ConstraintStatementContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by PFDLParser#moveOrderStep. + def visitMoveOrderStep(self, ctx:PFDLParser.MoveOrderStepContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by PFDLParser#mosStatement. + def visitMosStatement(self, ctx:PFDLParser.MosStatementContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by PFDLParser#actionOrderStep. + def visitActionOrderStep(self, ctx:PFDLParser.ActionOrderStepContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by PFDLParser#aosStatement. + def visitAosStatement(self, ctx:PFDLParser.AosStatementContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by PFDLParser#orderStep. + def visitOrderStep(self, ctx:PFDLParser.OrderStepContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by PFDLParser#transportOrderStep. + def visitTransportOrderStep(self, ctx:PFDLParser.TransportOrderStepContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by PFDLParser#tosStatement. + def visitTosStatement(self, ctx:PFDLParser.TosStatementContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by PFDLParser#locationStatement. + def visitLocationStatement(self, ctx:PFDLParser.LocationStatementContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by PFDLParser#parameterStatement. + def visitParameterStatement(self, ctx:PFDLParser.ParameterStatementContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by PFDLParser#eventStatement. + def visitEventStatement(self, ctx:PFDLParser.EventStatementContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by PFDLParser#onDoneStatement. 
+ def visitOnDoneStatement(self, ctx:PFDLParser.OnDoneStatementContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by PFDLParser#rule_. + def visitRule_(self, ctx:PFDLParser.Rule_Context): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by PFDLParser#rule_call. + def visitRule_call(self, ctx:PFDLParser.Rule_callContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by PFDLParser#rule_parameter. + def visitRule_parameter(self, ctx:PFDLParser.Rule_parameterContext): + return self.visitChildren(ctx) + + del PFDLParser \ No newline at end of file diff --git a/pfdl_scheduler/parser/pfdl_tree_visitor.py b/pfdl_scheduler/parser/pfdl_tree_visitor.py index b9ff2ef..933f20f 100644 --- a/pfdl_scheduler/parser/pfdl_tree_visitor.py +++ b/pfdl_scheduler/parser/pfdl_tree_visitor.py @@ -7,10 +7,16 @@ """Contains PFDLTreeVisitor class.""" # standard libraries +import json from typing import Dict, List, OrderedDict, Tuple, Union +from pfdl_scheduler.model.instance import Instance +from pfdl_scheduler.pfdl_base_classes import PFDLBaseClasses from pfdl_scheduler.utils import helpers from pfdl_scheduler.model.parallel import Parallel +# 3rd party +from antlr4.tree.Tree import TerminalNodeImpl + # local sources from pfdl_scheduler.validation.error_handler import ErrorHandler @@ -44,15 +50,22 @@ class PFDLTreeVisitor(PFDLParserVisitor): Attributes: error_handler: ErrorHandler instance for printing errors while visiting. current_task: Reference to the currently visited Task. Every visitor method can access it. + pfdl_base_classes: `PFDLBaseClasses` instance for creating new objects. """ - def __init__(self, error_handler: ErrorHandler) -> None: + def __init__( + self, + error_handler: ErrorHandler, + pfdl_base_classes: PFDLBaseClasses = PFDLBaseClasses(), + ) -> None: """Initialize the object. Args: - error_handler: ErrorHandler instance for printing errors while visiting. 
+ error_handler: `ErrorHandler` instance for printing errors while visiting. + pfdl_base_classes: `PFDLBaseClasses` instance for creating new objects. """ self.error_handler: ErrorHandler = error_handler + self.pfdl_base_classes: PFDLBaseClasses = pfdl_base_classes self.current_task: Task = None def visitProgram(self, ctx) -> Process: @@ -63,7 +76,7 @@ def visitProgram(self, ctx) -> Process: for child in ctx.children: process_component = self.visit(child) - if isinstance(process_component, Struct): + if isinstance(process_component, self.pfdl_base_classes.get_class("Struct")): if process_component.name not in process.structs: process.structs[process_component.name] = process_component else: @@ -72,7 +85,7 @@ def visitProgram(self, ctx) -> Process: "is already defined" ) self.error_handler.print_error(error_msg, context=child) - elif isinstance(process_component, Task): + elif isinstance(process_component, self.pfdl_base_classes.get_class("Task")): if process_component.name not in process.tasks: process.tasks[process_component.name] = process_component else: @@ -80,10 +93,66 @@ def visitProgram(self, ctx) -> Process: f"A Task with the name '{process_component.name}' " "is already defined" ) self.error_handler.print_error(error_msg, context=child) + elif isinstance(process_component, self.pfdl_base_classes.get_class("Instance")): + if process_component.name not in process.tasks: + process.instances[process_component.name] = process_component + else: + error_msg = ( + f"An instance with the name '{process_component.name}' " + "is already defined" + ) + self.error_handler.print_error(error_msg, context=child) + + # perform additional steps after visiting the syntax tree + self.execute_additional_tasks(process) + return process + def execute_additional_tasks(self, process: Process) -> None: + """Runs additional parsing methods with full information.""" + + # add instances to task variables so they can be used in expressions + self.addInstancesToAllTasks(process) + + # 
add attributes to the structs that are inherited from all parent structs + self.add_inherited_attributes_to_structs(process) + + def add_inherited_attributes_to_structs(self, process: Process) -> None: + """Tries to add attributes inherited from the respective parents to all child structs. + + Throws an error if one parent struct name is found to be invalid. + """ + for struct_name, struct in process.structs.items(): + parent_struct_attributes, invalid_parent_name = helpers.get_parent_struct_attributes( + struct_name, process.structs + ) + if not invalid_parent_name: + struct.attributes.update(parent_struct_attributes) + else: + error_msg = ( + f"The Struct '{struct.name}' tries to inherit from an unknown Struct " + f"'{invalid_parent_name}'." + ) + self.error_handler.print_error(error_msg, context=struct.context) + + def visitProgram_statement(self, ctx: PFDLParser.Program_statementContext): + if not isinstance(ctx.children[0], TerminalNodeImpl): + return self.visit(ctx.children[0]) + + def addInstancesToAllTasks(self, process: Process) -> None: + """Adds all instances to the variables of all tasks in the process. + + This method is necessary to use the instances in expressions. + + Args: + process: The `Process` object containing all tasks and instances. 
+ """ + for instance in process.instances.values(): + for task in process.tasks.values(): + task.variables[instance.name] = instance.struct_name + def visitStruct(self, ctx) -> Struct: - struct = Struct() + struct = self.pfdl_base_classes.get_class("Struct")() struct.name = ctx.STARTS_WITH_UPPER_C_STR().getText() struct.context = ctx @@ -99,10 +168,17 @@ def visitStruct(self, ctx) -> Struct: "is already defined in the Struct '{struct.name}'" ) self.error_handler.print_error(error_msg, context=variable_definition_ctx) + + if ctx.struct_id(): + struct.parent_struct_name = self.visitStruct_id(ctx.struct_id()) + return struct + def visitStruct_id(self, ctx: PFDLParser.Struct_idContext) -> str: + return ctx.children[0].getText() + def visitTask(self, ctx) -> Task: - task = Task() + task = self.pfdl_base_classes.get_class("Task")() task.name = ctx.STARTS_WITH_LOWER_C_STR().getText() task.context = ctx @@ -112,8 +188,8 @@ def visitTask(self, ctx) -> Task: task.input_parameters = self.visitTask_in(ctx.task_in()) task.context_dict[IN_KEY] = ctx.task_in() - for statement_ctx in ctx.statement(): - statement = self.visitStatement(statement_ctx) + for statement_ctx in ctx.taskStatement(): + statement = self.visitTaskStatement(statement_ctx) task.statements.append(statement) if ctx.task_out(): task.output_parameters = self.visitTask_out(ctx.task_out()) @@ -121,6 +197,41 @@ def visitTask(self, ctx) -> Task: return task + def visitInstance(self, ctx: PFDLParser.InstanceContext) -> Instance: + instance_name = ctx.STARTS_WITH_LOWER_C_STR().getText() + struct_name = self.visitStruct_id(ctx.struct_id()) + instance = self.pfdl_base_classes.get_class("Instance")( + name=instance_name, struct_name=struct_name, context=ctx + ) + self.current_program_component = instance + for attribute_assignment_ctx in ctx.attribute_assignment(): + attribute_name, attribute_value = self.visitAttribute_assignment( + attribute_assignment_ctx + ) + # JSON value + if isinstance(attribute_value, Dict): + 
attribute_value = self.pfdl_base_classes.get_class("Instance").from_json( + attribute_value, + self.error_handler, + ctx, + self.pfdl_base_classes, + ) + instance.attributes[attribute_name] = attribute_value + instance.attribute_contexts[attribute_name] = attribute_assignment_ctx + + return instance + + def visitAttribute_assignment( + self, ctx: PFDLParser.Attribute_assignmentContext + ) -> Tuple[List[str], Union[str, Dict]]: + value = None + if ctx.value(): + value = self.visitValue(ctx.value()) + value = helpers.cast_element(value) + else: + value = self.visitJson_object(ctx.json_object()) + return (ctx.STARTS_WITH_LOWER_C_STR().getText(), value) + def visitTask_in(self, ctx: PFDLParser.Task_inContext) -> Dict[str, Union[str, Array]]: input_parameters = OrderedDict() for variable_definition_context in ctx.variable_definition(): @@ -161,7 +272,7 @@ def visitStatement( return statement def visitService_call(self, ctx: PFDLParser.Service_callContext) -> Service: - service = Service() + service = self.pfdl_base_classes.get_class("Service")() service.context = ctx service.name = ctx.STARTS_WITH_UPPER_C_STR().getText() @@ -184,12 +295,15 @@ def visitCall_input( self, ctx: PFDLParser.Call_inputContext ) -> List[Union[str, List[str], Struct]]: input_params = [] - for child in ctx.parameter(): - parameter = self.visitParameter(child) - input_params.append(parameter) - for child in ctx.struct_initialization(): - struct = self.visitStruct_initialization(child) - input_params.append(struct) + for child in ctx.children: + if isinstance(child, self.pfdl_base_classes.get_class("PFDLParser").ParameterContext): + parameter = self.visitParameter(child) + input_params.append(parameter) + elif isinstance( + child, self.pfdl_base_classes.get_class("PFDLParser").Struct_initializationContext + ): + instance = self.visitStruct_initialization(child) + input_params.append(instance) return input_params def visitCall_output(self, ctx: PFDLParser.Call_outputContext) -> Dict[str, Union[str, 
Array]]: @@ -210,16 +324,21 @@ def visitParameter(self, ctx: PFDLParser.ParameterContext) -> Union[str, List[st return ctx.STARTS_WITH_LOWER_C_STR().getText() return self.visitAttribute_access(ctx.attribute_access()) - def visitStruct_initialization(self, ctx: PFDLParser.Struct_initializationContext) -> Struct: + def visitStruct_initialization(self, ctx: PFDLParser.Struct_initializationContext) -> Instance: json_string = ctx.json_object().getText() - struct = Struct.from_json(json_string, self.error_handler, ctx.json_object()) - struct.name = ctx.STARTS_WITH_UPPER_C_STR().getText() - struct.context = ctx - return struct + instance = self.pfdl_base_classes.get_class("Instance").from_json( + json.loads(json_string), + self.error_handler, + ctx.json_object(), + self.pfdl_base_classes, + ) + instance.name = ctx.STARTS_WITH_UPPER_C_STR().getText() + instance.context = ctx + return instance def visitTask_call(self, ctx: PFDLParser.Task_callContext) -> TaskCall: - task_call = TaskCall() + task_call = self.pfdl_base_classes.get_class("TaskCall")() task_call.name = ctx.STARTS_WITH_LOWER_C_STR().getText() task_call.context = ctx @@ -239,7 +358,7 @@ def visitTask_call(self, ctx: PFDLParser.Task_callContext) -> TaskCall: return task_call def visitParallel(self, ctx: PFDLParser.ParallelContext) -> Parallel: - parallel = Parallel() + parallel = self.pfdl_base_classes.get_class("Parallel")() parallel.context = ctx for task_call_context in ctx.task_call(): task_call = self.visitTask_call(task_call_context) @@ -247,7 +366,7 @@ def visitParallel(self, ctx: PFDLParser.ParallelContext) -> Parallel: return parallel def visitWhile_loop(self, ctx: PFDLParser.While_loopContext) -> WhileLoop: - while_loop = WhileLoop() + while_loop = self.pfdl_base_classes.get_class("WhileLoop")() while_loop.context = ctx while_loop.expression = self.visitExpression(ctx.expression()) @@ -258,7 +377,7 @@ def visitWhile_loop(self, ctx: PFDLParser.While_loopContext) -> WhileLoop: return while_loop def 
visitCounting_loop(self, ctx: PFDLParser.Counting_loopContext) -> CountingLoop: - counting_loop = CountingLoop() + counting_loop = self.pfdl_base_classes.get_class("CountingLoop")() counting_loop.context = ctx counting_loop.counting_variable = ctx.STARTS_WITH_LOWER_C_STR().getText() @@ -277,7 +396,7 @@ def visitCounting_loop(self, ctx: PFDLParser.Counting_loopContext) -> CountingLo return counting_loop def visitCondition(self, ctx: PFDLParser.ConditionContext) -> Condition: - condition_statement = Condition() + condition_statement = self.pfdl_base_classes.get_class("Condition")() condition_statement.context = ctx condition_statement.expression = self.visitExpression(ctx.expression()) @@ -326,7 +445,7 @@ def visitPrimitive(self, ctx: PFDLParser.PrimitiveContext): return ctx.getText() def initializeArray(self, array_ctx: PFDLParser.ArrayContext, variable_type: str) -> Array: - array = Array() + array = self.pfdl_base_classes.get_class("Array")() array.type_of_elements = variable_type array.context = array_ctx length = self.visitArray(array_ctx) diff --git a/pfdl_scheduler/petri_net/generator.py b/pfdl_scheduler/petri_net/generator.py index 995098a..3333228 100644 --- a/pfdl_scheduler/petri_net/generator.py +++ b/pfdl_scheduler/petri_net/generator.py @@ -31,6 +31,7 @@ from pfdl_scheduler.petri_net.drawer import draw_petri_net from pfdl_scheduler.petri_net.callbacks import PetriNetCallbacks +from pfdl_scheduler.pfdl_base_classes import PFDLBaseClasses plugins.load(["labels", "gv", "clusters"], "snakes.nets", "nets") @@ -82,6 +83,7 @@ class PetriNetGenerator: callbacks: A PetriNetCallbacks instance representing functions called while execution. generate_test_ids: A boolean indicating if test ids (counting from 0) should be generated. used_in_extension: A boolean indicating if the Generator is used within the extension. + pfdl_base_classes: An instance of `PFDLBaseClasses`. 
""" def __init__( @@ -91,6 +93,7 @@ def __init__( generate_test_ids: bool = False, draw_net: bool = True, file_name: str = "petri_net", + pfdl_base_classes: PFDLBaseClasses = PFDLBaseClasses(), ) -> None: """Initialize the object. @@ -100,6 +103,7 @@ def __init__( generate_test_ids: A boolean indicating if test ids (counting from 0) should be generated. draw_net: A boolean indicating if the petri net should be drawn. file_name: The desired filename of the petri net image. + pfdl_base_classes: An instance of `PFDLBaseClasses`. """ if used_in_extension: @@ -117,11 +121,13 @@ def __init__( self.transition_dict: OrderedDict = OrderedDict() self.place_dict: Dict = {} self.task_started_uuid: str = "" - self.callbacks: PetriNetCallbacks = PetriNetCallbacks() + self.callbacks: PetriNetCallbacks = pfdl_base_classes.get_class("PetriNetCallbacks")() self.generate_test_ids: bool = generate_test_ids self.used_in_extension: bool = used_in_extension self.tree = None self.file_name = file_name + self.pfdl_base_classes = pfdl_base_classes + self.service_apis: list[ServiceAPI] = [] def add_callback(self, transition_uuid: str, callback_function: Callable, *args: Any) -> None: """Registers the given callback function in the transition_dict. 
@@ -156,7 +162,7 @@ def generate_petri_net(self, process: Process) -> PetriNet: group_uuid = str(uuid.uuid4()) self.tree = Node(group_uuid, start_task.name) - task_context = TaskAPI(start_task, None) + task_context = self.pfdl_base_classes.get_class("TaskAPI")(start_task, None) if self.generate_test_ids: task_context.uuid = "0" @@ -244,17 +250,17 @@ def generate_statements( in_loop, ) - if isinstance(statement, Service): + if isinstance(statement, self.pfdl_base_classes.get_class("Service")): connection_uuids = [self.generate_service(*args)] - elif isinstance(statement, TaskCall): + elif isinstance(statement, self.pfdl_base_classes.get_class("TaskCall")): connection_uuids = self.generate_task_call(*args) - elif isinstance(statement, Parallel): + elif isinstance(statement, self.pfdl_base_classes.get_class("Parallel")): connection_uuids = [self.generate_parallel(*args)] - elif isinstance(statement, CountingLoop): + elif isinstance(statement, self.pfdl_base_classes.get_class("CountingLoop")): connection_uuids = [self.generate_counting_loop(*args)] - elif isinstance(statement, WhileLoop): + elif isinstance(statement, self.pfdl_base_classes.get_class("WhileLoop")): connection_uuids = [self.generate_while_loop(*args)] - elif isinstance(statement, Condition): + elif isinstance(statement, self.pfdl_base_classes.get_class("Condition")): connection_uuids = self.generate_condition(*args) else: connection_uuids = self.handle_other_statements(*args) @@ -280,7 +286,10 @@ def generate_service( group_uuid = str(uuid.uuid4()) service_node = Node(group_uuid, service.name, node) - service_api = ServiceAPI(service, task_context, in_loop=in_loop) + service_api = self.pfdl_base_classes.get_class("ServiceAPI")( + service, task_context, in_loop=in_loop + ) + self.service_apis.append(service_api) service_started_uuid = create_place(service.name + " started", self.net, service_node) service_finished_uuid = create_place(service.name + " finished", self.net, service_node) @@ -327,7 +336,9 @@ 
def generate_task_call( The uuids of the last transitions of the TaskCall petri net component. """ called_task = self.tasks[task_call.name] - new_task_context = TaskAPI(called_task, task_context, task_call=task_call, in_loop=in_loop) + new_task_context = self.pfdl_base_classes.get_class("TaskAPI")( + called_task, task_context, task_call=task_call, in_loop=in_loop + ) group_uuid = str(uuid.uuid4()) task_node = Node(group_uuid, task_call.name, node) diff --git a/pfdl_scheduler/pfdl_base_classes.py b/pfdl_scheduler/pfdl_base_classes.py new file mode 100644 index 0000000..3ae4852 --- /dev/null +++ b/pfdl_scheduler/pfdl_base_classes.py @@ -0,0 +1,92 @@ +# Copyright The PFDL Contributors +# +# Licensed under the MIT License. +# For details on the licensing terms, see the LICENSE file. +# SPDX-License-Identifier: MIT + +"""Contains the PFDLBaseClasses class.""" + +import os +import importlib +import inspect + + +class PFDLBaseClasses: + def __init__(self, base_dir="pfdl_scheduler"): + self._class_registry = {} + self._class_instances = {} + self._base_dir = base_dir # Base directory for project scanning + self._default_classes = self._scan_project_classes() + + def register_class(self, name, class_reference): + """Register a custom class with a specific name.""" + self._class_registry[name] = class_reference + + def get_class(self, name): + """Return the registered class if available, otherwise the default class.""" + # Check if the class is registered + if name in self._class_registry: + return self._class_registry[name] + + # Fall back to default if not registered + return self._get_default_class(name) + + def get_instance(self, name, *args, **kwargs): + """Instantiate the class dynamically if not already instantiated.""" + if name not in self._class_instances: + class_ref = self.get_class(name) + if class_ref is None: + raise ValueError(f"Class '{name}' not found.") + self._class_instances[name] = class_ref(*args, **kwargs) # Instantiate with args + return 
self._class_instances[name] + + def _scan_project_classes(self): + """Scan the project folder for all available classes, ignoring 'plugins' folder.""" + class_map = {} + for root, dirs, files in os.walk(self._base_dir): + # Skip the 'plugins' folder if encountered + dirs[:] = [d for d in dirs if d != "plugins"] + + for file in files: + if file.endswith(".py") and not file.startswith("__"): + # Create the module path by converting file path to importable module + module_path = os.path.join(root, file) + module_name = self._module_name_from_path(module_path) + + try: + # Dynamically import the module + module = importlib.import_module(module_name) + + # Find all classes defined in the module + for name, obj in inspect.getmembers(module, inspect.isclass): + # Map class name to its full module path + class_map[name] = f"{module_name}.{name}" + except Exception as e: + # Handle any import errors + print(f"Failed to import {module_name}: {e}") + return class_map + + def _module_name_from_path(self, path): + """Convert a file path to a valid module import path.""" + module_name = path.replace(os.sep, ".")[:-3] + if module_name.startswith("."): + module_name = module_name[1:] + return module_name + + def _get_default_class(self, name): + """Dynamically load the default class based on the component name.""" + if name not in self._default_classes: + raise ValueError(f"Default class for '{name}' not found.") + + # Extract the module path and class name + full_class_path = self._default_classes[name] + module_path, class_name = full_class_path.rsplit(".", 1) + + # Dynamically import the class from the module + module = importlib.import_module(module_path) + return getattr(module, class_name) + + def clear_registry(self): + """Clear the registry and instances.""" + self._class_registry.clear() + self._class_instances.clear() diff --git a/pfdl_scheduler/plugins/README.md b/pfdl_scheduler/plugins/README.md new file mode 100644 index 0000000..491375d --- /dev/null +++ 
b/pfdl_scheduler/plugins/README.md @@ -0,0 +1,52 @@ + + +# PFDL Plugin System +The PFDL plugin system can be used to create plugins that extend the grammar and the underlying logic of the PFDL. +In the following, the different steps for creating your own plugin will be explained in detail. + +## The Plugin Loader - modify PFDL code +The core of the plugin system is the `PluginLoader` class. This class can be used to load the desired plugins and for returning the overwritten classes. +The PFDL code base was designed in a way such that its base classes can be changed. Thus, the plugin loader returns a `PFDLBaseClasses` object which contains all base classes of the PFDL overwritten by the plugins (See the example below). + +The plugin loader's `load_plugins` method requires a list of strings which are essentially the paths to the plugin folders in which it will search for classes with the decorator `@base_class("")`. Important here is that the name of the overwritten base class must match the actual class. If a class has a decorator and is inside a folder in the plugins folder it will be used for overwriting the respective base class. Additionally, the overwritten classes need to inherit from the base class so that the new combined class receives all methods and attributes. + +```python +@base_class("Instance") +class Instance(pfdl_scheduler.model.instance.Instance): + ... +``` + + An example of how to load plugins and receive the overwritten base classes is shown here: + +```python +plugin_loader = PluginLoader() +plugin_loader.load_plugins(["plugins/sample_plugin_folder"]) + +pfdl_base_classes = plugin_loader.get_pfdl_base_classes() +``` + +The `Scheduler` class, which is the entry point for using the PFDL, has an optional parameter for the base classes which can be used with the newly created plugin base classes. This way, the user-made changes will be directly inserted into the PFDL base code. 
If you want to also modify the Scheduler class a complete example would look like this: + +```python +scheduler = pfdl_base_classes.scheduler_class( + ... + pfdl_base_classes=pfdl_base_classes, +) +``` + +## Merging Grammars +If you want to make changes to the base grammar of the PFDL you can do that by creating a custom Lexer and Parser file in the antlr `.g4` format. You only need to define your required rules. +If the rule names are not in the base grammar they will just be added as new rules. +If they already exist they will be added as an alternative to the old rule. + +To generate a new grammar that contains the old rules and the newly added or overwritten rules of the plugins, the `grammar_merge.py` script has to be executed inside the `plugins` folder. +The script requires a list of parser and subsequently a list of lexer files so that you can define which plugins should be used to create a combined grammar. +The order of the passed grammar files can change the overall result so keep that in mind. Moreover, due to the nature of such systems, some plugins might not work when used together! + +### The Parser folder +The `grammar_merge.py` script will call the ANTLR build script to generate Lexer, Parser, and Visitor python files that can be used by the plugin and stores them inside the `parser` folder inside the `plugins` folder. +Internally, the newly generated classes will be inserted into the base classes which are then used by the PFDL base code. +In addition, if you want to check the merged grammar or want to manually edit it, the parser folder also contains the merged `.g4` files from which the parser files are generated. 
\ No newline at end of file diff --git a/pfdl_scheduler/plugins/__init__.py b/pfdl_scheduler/plugins/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/pfdl_scheduler/plugins/grammar_merge.py b/pfdl_scheduler/plugins/grammar_merge.py new file mode 100644 index 0000000..1c80900 --- /dev/null +++ b/pfdl_scheduler/plugins/grammar_merge.py @@ -0,0 +1,197 @@ +# Copyright The PFDL Contributors +# +# Licensed under the MIT License. +# For details on the licensing terms, see the LICENSE file. +# SPDX-License-Identifier: MIT + +"""Merges multiple grammar files into a single grammar file.""" + +import argparse +import re +import subprocess +from pathlib import Path +from typing import Dict, List + +PLUGIN_ENTRY_POINT = "// {Plugin_Move_To_Front}" +LEXER_PLUGIN_INSERTION_POINT = "// {Plugin_Insertion_Point}" + +# Regular expression to match grammar rules including the custom entry point for plugins +rule_pattern = re.compile(r"((\/\/\s*\{Plugin_Move_To_Front\}\s*\n)?\w+\s*:\s*[^;]+;)") + + +def extract_rules(grammar_content: str) -> Dict[str, str]: + """Extracts rules from a given grammar content while preserving the original formatting. + + Args: + grammar_content: The entire content of the grammar + + Returns: + A dictionary of rule names and their entire formatted content. + """ + rules = {} + for match in rule_pattern.finditer(grammar_content): + rule = match.group(0) + rule_name = rule.split(":")[0].strip() + rules[rule_name] = rule + return rules + + +def merge_parser(base_grammar: str, new_grammar: str) -> str: + """Merges two parser contents while preserving the format of the base grammar. + + Args: + base_grammar: The base grammar content. + new_grammar: The new grammar content to be merged. + + Returns: + The merged grammar content as a string. 
+ """ + base_rules = extract_rules(base_grammar) + new_rules = extract_rules(new_grammar) + + merged_grammar = base_grammar # Start with the base grammar as-is + + for rule_name, new_rule in new_rules.items(): + move_rule_to_the_front = False + if new_rule.strip().startswith(PLUGIN_ENTRY_POINT): + move_rule_to_the_front = True + new_rule = new_rule.replace(PLUGIN_ENTRY_POINT, "").strip() + rule_name = rule_name.replace(PLUGIN_ENTRY_POINT, "").strip() + + if rule_name in base_rules: + # Add new alternative to the existing rule + base_rule = base_rules[rule_name] + # Find the position before the semicolon to insert the new alternative + if move_rule_to_the_front: + insert_pos = base_rule.find(":") + 1 + merged_grammar = merged_grammar.replace( + base_rule, + base_rule[:insert_pos] + + new_rule.split(":")[1].strip().rstrip(";") + + " | " + + base_rule[insert_pos:], + ) + else: + insert_pos = base_rule.rfind(";") + merged_grammar = merged_grammar.replace( + base_rule, + base_rule[:insert_pos] + + " | " + + new_rule.split(":")[1].strip().rstrip(";") + + base_rule[insert_pos:], + ) + else: + # Add new rule at the end of the grammar with appropriate formatting + merged_grammar += "\n\n" + new_rule + + return merged_grammar + + +def merge_lexer(base_lexer: str, new_lexer: str) -> str: + """Merges two lexer contents while preserving the format of the base lexer. + + Args: + base_lexer: The base lexer content. + new_lexer: The new lexer content to be merged. + + Returns: + The merged lexer content as a string. + """ + merged_grammar = base_lexer # Start with the base grammar as-is + + insert_position = merged_grammar.find(LEXER_PLUGIN_INSERTION_POINT) + merged_grammar = ( + merged_grammar[:insert_position] + new_lexer + "\n\n" + merged_grammar[insert_position:] + ) + + return merged_grammar + + +def merge_multiple_parsers(parser_files: List[str]) -> str: + """Merge multiple grammar files. + + Args: + parser_files: A list of file paths to the grammar files. 
+ + Returns: + The merged grammar content as a string. + """ + with open(parser_files[0], "r") as file: + base_grammar = file.read() + + for grammar_file in parser_files[1:]: + with open(grammar_file, "r") as file: + new_grammar = file.read() + base_grammar = merge_parser(base_grammar, new_grammar) + return base_grammar + + +def merge_multiple_lexers(lexer_files: List[str]) -> str: + """Merge multiple lexer files. + + Args: + lexer_files: A list of file paths to the lexer files. + + Returns: + The merged lexer content as a string. + """ + with open(lexer_files[0], "r") as file: + base_grammar = file.read() + + for grammar_file in lexer_files[1:]: + with open(grammar_file, "r") as file: + new_grammar = file.read() + base_grammar = merge_lexer(base_grammar, new_grammar) + return base_grammar + + +if __name__ == "__main__": + + script_description = """ + This script merges multiple grammar and lexer files into single grammar and lexer files respectively. + It then generates the corresponding ANTLR parser and lexer files for Python3. + + Usage: + python grammar_merge.py + + Arguments: + parser_files: List of file paths to the grammar files to be merged. + lexer_files: List of file paths to the lexer files to be merged. 
+ + Example: + python grammar_merge.py parser1.g4 parser2.g4 lexer1.g4 lexer2.g4 + """ + parser = argparse.ArgumentParser( + prog="PFDL grammar merge script", + description=script_description, + formatter_class=argparse.RawTextHelpFormatter, + ) + parser.add_argument("parser_files", type=str, nargs="+") + parser.add_argument("lexer_files", type=str, nargs="+") + + args = parser.parse_args() + + parser_files = ["../../pfdl_grammar/PFDLParser.g4", *args.parser_files] + merged_parser = merge_multiple_parsers(parser_files) + + lexer_files = ["../../pfdl_grammar/PFDLLexer.g4", *args.lexer_files] + merged_lexer = merge_multiple_lexers(lexer_files) + + file = Path("parser/PFDLParser.g4") + file.parent.mkdir(parents=True, exist_ok=True) + file.write_text(merged_parser) + + file = Path("parser/PFDLLexer.g4") + file.write_text(merged_lexer) + + subprocess.call( + [ + "antlr4", + "-v", + "4.9.3", + "-Dlanguage=Python3", + "-visitor", + "parser/PFDLLexer.g4", + "parser/PFDLParser.g4", + ] + ) diff --git a/pfdl_scheduler/plugins/parser/__init__.py b/pfdl_scheduler/plugins/parser/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/pfdl_scheduler/plugins/plugin_loader.py b/pfdl_scheduler/plugins/plugin_loader.py new file mode 100644 index 0000000..7c28528 --- /dev/null +++ b/pfdl_scheduler/plugins/plugin_loader.py @@ -0,0 +1,236 @@ +# Copyright The PFDL Contributors +# +# Licensed under the MIT License. +# For details on the licensing terms, see the LICENSE file. +# SPDX-License-Identifier: MIT + +"""Contains the PluginLoader class for dynamically loading plugins for the PFDL.""" + +from functools import wraps +import importlib.util +import os +import sys +import inspect +from typing import List +from pathlib import Path + +from pfdl_scheduler.pfdl_base_classes import PFDLBaseClasses + +base_classes_registry = {} + + +def base_class(existing_class_name): + """A Decorator to mark a class that will extend an existing class. 
+ + Registers the class in the base_classes_registry. + """ + + def decorator(cls): + if existing_class_name not in base_classes_registry: + base_classes_registry[existing_class_name] = [] + base_classes_registry[existing_class_name].append(cls) + return cls + + return decorator + + +def wrap_method(original_method, new_methods): + """Chains multiple methods together, calling them in order.""" + + @wraps(original_method) + def wrapper(*args, **kwargs): + for method in new_methods: + result = method(*args, **kwargs) + return result + + return wrapper + + +def apply_plugin_to_base(base_class, plugin_class): + """Applies the methods and attributes of the plugin_class to the base_class. + + Handles method overwrites with different argument counts, including class methods. + """ + + class CombinedClass(base_class, plugin_class): + def __init__(self, *args, **kwargs): + plugin_class.__init__(self, *args, **kwargs) + + # Method containers for chaining + method_overrides = {} + + for name, method in plugin_class.__dict__.items(): + # Handle instance methods and class methods separately + if callable(method): + if name in method_overrides: + method_overrides[name].append(method) + else: + method_overrides[name] = [getattr(base_class, name, None), method] + elif not name.startswith("__"): + # Add class attributes (non-callable) + setattr(CombinedClass, name, method) + + # Add or override instance methods in CombinedClass + for name, methods in method_overrides.items(): + original_method = methods[0] if methods[0] is not None else None + combined_methods = methods[1:] # Plugins' methods + + if original_method: + setattr(CombinedClass, name, wrap_method(original_method, combined_methods)) + else: + setattr( + CombinedClass, name, wrap_method(lambda *args, **kwargs: None, combined_methods) + ) + + # Add class-level attributes + for name, attr in plugin_class.__dict__.items(): + if not callable(attr) and not name.startswith("__"): + if not hasattr(CombinedClass, name): + 
setattr(CombinedClass, name, attr) + + CombinedClass.__name__ = base_class.__name__ + CombinedClass.__qualname__ = CombinedClass.__name__ + + return CombinedClass + + +class PluginLoader: + """Loads plugins and applies them to the existing classes in the main project. + + The PluginLoader class is responsible for dynamically loading plugins from the plugin folder + and applying them to the existing classes in the main project. It automatically detects all + classes in the main project and combines them with the plugin classes to create the final classes. + """ + + def __init__(self): + self.existing_classes = self.get_existing_classes() + + def get_existing_classes(self): + """Automatically detect and load all classes in the main project, excluding the plugin folder.""" + existing_classes = {} + main_project_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) + plugins_path = os.path.abspath(os.path.dirname(__file__)) # Path to the plugin folder + + # Walk through the project files to find Python files excluding the plugins folder + for root, _, files in os.walk(main_project_path): + if root.startswith(plugins_path): + continue # Skip files inside the plugins folder + + for file in files: + if file.endswith(".py"): + module_name = os.path.splitext(file)[0] + module_path = os.path.join(root, file) + + if module_name == "__init__": + continue + + # Convert file path to importable module name + relative_path = os.path.relpath(module_path, main_project_path) + module_import_name = relative_path.replace(os.path.sep, ".")[ + :-3 + ] # Remove '.py' + + try: + # Dynamically import the module + spec = importlib.util.spec_from_file_location( + module_import_name, module_path + ) + module = importlib.util.module_from_spec(spec) + sys.modules[module_import_name] = module + spec.loader.exec_module(module) + + # Inspect the module for classes + for name, obj in inspect.getmembers(module, inspect.isclass): + if obj.__module__ == module_import_name: + 
existing_classes[name] = obj + + except Exception as e: + print(f"Error loading module {module_import_name}: {e}") + + return existing_classes + + def load_plugin_modules(self, module_name, module_path): + """Dynamically import a module given its path and register any classes that overwrite base classes.""" + spec = importlib.util.spec_from_file_location(module_name, module_path) + module = importlib.util.module_from_spec(spec) + sys.modules[module_name] = module + spec.loader.exec_module(module) + + def load_plugins(self, plugins: List[str]): + """Recursively load all Python files from plugin folders.""" + for plugin_folder in plugins: + plugin_path = Path(__file__).parent / plugin_folder + + if not plugin_path.is_dir(): + raise ValueError("given plugin could not be found") + + # Walk through all files in the plugin folder + for root, _, files in os.walk(plugin_path): + for file in files: + if file.endswith(".py"): + module_name = f"{plugin_folder}.{file[:-3]}" # Plugin folder + filename without .py + module_path = os.path.join(root, file) + self.load_plugin_modules(module_name, module_path) + + def get_final_classes(self): + """Return a dictionary of final classes after applying plugins.""" + final_classes = {} + + for class_name, base_class in self.existing_classes.items(): + if class_name in base_classes_registry: + # Combine the existing class with the plugin classes + combined_class = base_class + for plugin_class in base_classes_registry[class_name]: + combined_class = apply_plugin_to_base(combined_class, plugin_class) + + final_classes[class_name] = combined_class + else: + final_classes[class_name] = base_class + + return final_classes + + def get_pfdl_base_classes( + self, pfdl_base_classes_path: str = "pfdl_scheduler" + ) -> PFDLBaseClasses: + """Return an instance of `PFDLBaseClasses` populated with final classes after applying plugins. + + Class names are dynamically handled. 
The base classes are populated with the final classes + after applying plugins, and the registry is updated with any new classes that are not already + present in the base classes. + + Args: + pfdl_base_classes_path: The path to the PFDL base classes module. + + Returns: + An instance of `PFDLBaseClasses` populated with the final classes after applying plugins. + """ + final_classes = self.get_final_classes() + base_classes = PFDLBaseClasses(base_dir=pfdl_base_classes_path) + + for class_name, class_ref in final_classes.items(): + # Try to find a matching property on the base class + # Convert class_name to its lower_snake_case form to match typical property naming conventions + property_name = self._class_name_to_property_name(class_name) + + # If the property exists, set it dynamically + if hasattr(base_classes, property_name): + setattr(base_classes, property_name, class_ref) + else: + base_classes.register_class(class_name, class_ref) + + return base_classes + + def _class_name_to_property_name(self, class_name: str) -> str: + """Converts a class name to a property name by converting CamelCase to snake_case. + + Args: + class_name: The name of the class to convert. + + Returns: + The converted property name. 
+ """ + import re + + # Convert CamelCase to snake_case, and append '_class' to the name + s1 = re.sub("([a-z])([A-Z])", r"\1_\2", class_name).lower() + return f"{s1}_class" diff --git a/pfdl_scheduler/scheduler.py b/pfdl_scheduler/scheduler.py index d1f0fc5..322d066 100644 --- a/pfdl_scheduler/scheduler.py +++ b/pfdl_scheduler/scheduler.py @@ -22,13 +22,17 @@ from pfdl_scheduler.api.task_api import TaskAPI from pfdl_scheduler.api.service_api import ServiceAPI +from pfdl_scheduler.pfdl_base_classes import PFDLBaseClasses from pfdl_scheduler.utils.parsing_utils import parse_program from pfdl_scheduler.petri_net.generator import Node, PetriNetGenerator -from pfdl_scheduler.petri_net.logic import PetriNetLogic from pfdl_scheduler.scheduling.event import Event -from pfdl_scheduler.scheduling.event import START_PRODUCTION_TASK, SET_PLACE, SERVICE_FINISHED +from pfdl_scheduler.scheduling.event import ( + START_PRODUCTION_TASK, + SET_PLACE, + SERVICE_FINISHED, +) from pfdl_scheduler.scheduling.task_callbacks import TaskCallbacks from pfdl_scheduler.api.observer_api import NotificationType, Observer, Subject @@ -50,7 +54,9 @@ class Scheduler(Subject): The scheduler comprises almost the complete execution of a production order including the parsing of the PFDL description, model creation and validation and execution of the petri net. It interacts with the execution engines and informs them about services - or tasks which started or finished. + or tasks which started or finished. The pfdl_base_classes attribute is one of the most + important attributes of the scheduler. It holds the base classes for the scheduler that + can be overwritten to extend the scheduler with plugins. This class implements the Observer pattern and serves as subject. Observers can be registered in the scheduler and receive updates (e.g. log entries, info about a new petri net img,..) @@ -68,6 +74,7 @@ class Scheduler(Subject): generate_test_ids: Indicates whether test ids should be generated. 
test_id_counters: A List consisting of counters for the test ids of tasks and services. observers: List of `Observers` used to update them on a `notify` call. + pfdl_base_classes: A `PFDLBaseClasses` instance which holds the base classes for the scheduler. """ def __init__( @@ -77,6 +84,7 @@ def __init__( draw_petri_net: bool = True, scheduler_uuid: str = "", dashboard_host_address: str = "", + pfdl_base_classes: PFDLBaseClasses = PFDLBaseClasses("pfdl_scheduler"), ) -> None: """Initialize the object. @@ -92,24 +100,46 @@ def __init__( draw_petri_net: A boolean indicating whether the petri net should be drawn. scheduler_uuid: A unique ID to identify the Scheduer / Production Order dashboard_host_address: The address of the Dashboard (if existing) + pfdl_base_classes: A `PFDLBaseClasses` instance which holds the base classes for the scheduler. """ - self.init_scheduler(scheduler_uuid, generate_test_ids) - self.pfdl_file_valid, self.process, pfdl_string = parse_program(pfdl_program) + self.init_scheduler( + scheduler_uuid, + generate_test_ids, + pfdl_base_classes.get_instance("PetriNetGenerator"), + pfdl_base_classes.get_instance("TaskCallbacks"), + ) + self.pfdl_file_valid, self.process, pfdl_string = parse_program( + pfdl_program, pfdl_base_classes + ) if self.pfdl_file_valid: - self.petri_net_generator = PetriNetGenerator( + self.petri_net_generator = pfdl_base_classes.get_class("PetriNetGenerator")( "", generate_test_ids=self.generate_test_ids, draw_net=draw_petri_net, file_name=self.scheduler_uuid, ) - self.setup_scheduling(draw_petri_net) + self.setup_scheduling(draw_petri_net, pfdl_base_classes.get_class("PetriNetLogic")) if dashboard_host_address != "": self.attach( DashboardObserver(dashboard_host_address, self.scheduler_uuid, pfdl_string) ) - def init_scheduler(self, scheduler_uuid: str, generate_test_ids: bool): + def init_scheduler( + self, + scheduler_uuid: str, + generate_test_ids: bool, + petri_net_generator: PetriNetGenerator, + task_callbacks: 
TaskCallbacks, + ) -> None: + """Initialize the scheduler with the given parameters. + + Args: + scheduler_uuid: A unique ID to identify the scheduler / production order + generate_test_ids: A boolean indicating whether test ids should be generated. + petri_net_generator: A `PetriNetGenerator` instance for generating the petri net. + task_callbacks: `TaskCallbacks` instance which holds the registered callbacks. + """ if scheduler_uuid == "": self.scheduler_uuid: str = str(uuid.uuid4()) else: @@ -117,9 +147,8 @@ def init_scheduler(self, scheduler_uuid: str, generate_test_ids: bool): self.running: bool = False self.pfdl_file_valid: bool = False self.process: Process = None - self.petri_net_generator: PetriNetGenerator = None - self.petri_net_logic: PetriNetLogic = None - self.task_callbacks: TaskCallbacks = TaskCallbacks() + self.petri_net_generator: PetriNetGenerator = petri_net_generator + self.task_callbacks: TaskCallbacks = task_callbacks self.variable_access_function: Callable[[str], str] = None self.loop_counters: Dict[str, Dict[str, int]] = {} self.awaited_events: List[Event] = [] @@ -127,11 +156,16 @@ def init_scheduler(self, scheduler_uuid: str, generate_test_ids: bool): self.test_id_counters: List[int] = [0, 0] self.observers: List[Observer] = [] - def setup_scheduling(self, draw_petri_net: bool): + def setup_scheduling(self, draw_petri_net: bool, petri_net_logic_class) -> None: + """Setup the scheduling process. + + This method is called after the PFDL file was successfully parsed and the petri net + generator was created. It generates the petri net and creates the petri net logic. 
+ """ self.register_for_petrinet_callbacks() self.petri_net_generator.generate_petri_net(self.process) - self.petri_net_logic = PetriNetLogic( + self.petri_net_logic = petri_net_logic_class( self.petri_net_generator, draw_petri_net, file_name=self.scheduler_uuid ) @@ -359,7 +393,11 @@ def on_service_finished(self, service_api: ServiceAPI) -> None: self.notify(NotificationType.LOG_EVENT, (log_entry, logging.INFO, False)) def on_condition_started( - self, condition: Condition, then_uuid: str, else_uuid: str, task_context: TaskAPI + self, + condition: Condition, + then_uuid: str, + else_uuid: str, + task_context: TaskAPI, ) -> None: """Executes Scheduling logic when a Condition statement is started.""" if self.check_expression(condition.expression, task_context): diff --git a/pfdl_scheduler/scheduling/event.py b/pfdl_scheduler/scheduling/event.py index 2fec82f..dcbf075 100644 --- a/pfdl_scheduler/scheduling/event.py +++ b/pfdl_scheduler/scheduling/event.py @@ -33,11 +33,10 @@ def __init__(self, event_type: str = "", data: Dict = None) -> None: self.event_type: str = event_type self.data: Dict = data - def __eq__(self, other: "Event"): - if not isinstance(other, Event): - # don't attempt to compare against unrelated types - return NotImplemented - return self.event_type == other.event_type and self.data == other.data + def __eq__(self, other: object) -> bool: + if hasattr(other, "event_type") and hasattr(other, "data"): + return self.event_type == other.event_type and self.data == other.data + return False @classmethod def from_json(cls, json_string: str) -> Union[None, "Event"]: diff --git a/pfdl_scheduler/utils/helpers.py b/pfdl_scheduler/utils/helpers.py index 358439c..429a286 100644 --- a/pfdl_scheduler/utils/helpers.py +++ b/pfdl_scheduler/utils/helpers.py @@ -7,7 +7,7 @@ """Helper functions used in the project (especially in the SemanticErrorChecker).""" # standard libraries -from typing import Dict, List, Union +from typing import Dict, List, Tuple, Union import 
operator # local sources @@ -15,6 +15,54 @@ from pfdl_scheduler.model.task import Task +def get_parent_struct_names( + struct_name: str, structs: Dict[str, Struct] +) -> Tuple[List[str], Union[str, None]]: + """Returns the name of the parent and subsequent parents of the given struct. + + Args: + struct_name: Name of the struct for which the parent names should be returned. + structs: A Dict that contains all Structs of the PFDL program. + + Returns: + Tuple containing + - List of parent names as string + - String of an invalid parent name that was not found or None + """ + parent_struct_names = [] + parent_struct_name = structs[struct_name].parent_struct_name + while parent_struct_name != "" and parent_struct_name is not None: + if parent_struct_name not in structs: + return [], parent_struct_name + parent_struct_names.append(parent_struct_name) + parent_struct_name = structs[parent_struct_name].parent_struct_name + return parent_struct_names, None + + +def get_parent_struct_attributes( + struct_name: str, structs: Dict[str, Struct] +) -> Tuple[Dict, Union[str | None]]: + """Returns the attributes of the parent and subsequent parents of the given struct. + + Args: + struct_name: Name of the struct for which the parent names should be returned. + structs: A Dict that contains all Structs of the PFDL program. + + Returns: + Tuple containing + - Dict that maps attribute names of parents to the corresponding type. 
+ - String of an invalid parent name that was not found or None + """ + parent_struct_attributes = {} + parent_struct_names, invalid_parent_name = get_parent_struct_names(struct_name, structs) + if not invalid_parent_name: + for parent_struct_name in parent_struct_names: + parent_struct_attributes.update(structs[parent_struct_name].attributes) + return parent_struct_attributes, None + + return parent_struct_attributes, invalid_parent_name + + def get_type_of_variable_list( var_list: List[str], task: Task, struct_definitions: Dict[str, Struct] ) -> str: @@ -102,21 +150,23 @@ def is_int(string: str) -> bool: return True -def cast_element(string: str) -> Union[str, int, float, bool]: - """Tries to cast the given string to a primitive datatype. +def cast_element(element: Union[str, List]) -> Union[str, int, float, bool]: + """Tries to cast the given string or list to a primitive datatype. Returns: - The casted element if casting was successful, otherwise the input string + The casted element if casting was successful, otherwise the input element """ - if is_int(string): - return int(string) - elif is_float(string): - return float(string) - elif is_boolean(string): - return string == "true" - elif is_string(string): - return string.replace('"', "") - return string + if is_int(element): + return int(element) + elif is_float(element): + return float(element) + elif is_boolean(element): + return element == "true" + elif is_string(element): + return element.replace('"', "") + elif isinstance(element, list) and len(element) == 1: + return element[0] + return element def parse_operator(op: str) -> operator: diff --git a/pfdl_scheduler/utils/parsing_utils.py b/pfdl_scheduler/utils/parsing_utils.py index 56a1454..488f808 100644 --- a/pfdl_scheduler/utils/parsing_utils.py +++ b/pfdl_scheduler/utils/parsing_utils.py @@ -17,19 +17,15 @@ from antlr4.InputStream import InputStream # local sources -from pfdl_scheduler.parser.pfdl_tree_visitor import PFDLTreeVisitor -from 
pfdl_scheduler.parser.PFDLLexer import PFDLLexer -from pfdl_scheduler.parser.PFDLParser import PFDLParser - -from pfdl_scheduler.validation.error_handler import ErrorHandler -from pfdl_scheduler.validation.syntax_error_listener import SyntaxErrorListener -from pfdl_scheduler.validation.semantic_error_checker import SemanticErrorChecker - +from pfdl_scheduler.pfdl_base_classes import PFDLBaseClasses from pfdl_scheduler.model.process import Process def parse_string( - pfdl_string: str, file_path: str = "", used_in_extension: bool = False + pfdl_string: str, + file_path: str = "", + used_in_extension: bool = False, + pfdl_base_classes: PFDLBaseClasses = PFDLBaseClasses("pfdl_scheduler"), ) -> Tuple[bool, Union[None, Process]]: """Instantiate the ANTLR lexer and parser and parses the given PFDL string. @@ -41,25 +37,27 @@ def parse_string( Returns: A boolan indicating validity of the PFDL file and the process object if so, otherwise None. """ - lexer = PFDLLexer(InputStream(pfdl_string)) + lexer = pfdl_base_classes.get_class("PFDLLexer")(InputStream(pfdl_string)) lexer.removeErrorListeners() token_stream = CommonTokenStream(lexer) - parser = PFDLParser(token_stream) + parser = pfdl_base_classes.get_class("PFDLParser")(token_stream) parser.removeErrorListeners() - error_handler = ErrorHandler(file_path, used_in_extension) - error_listener = SyntaxErrorListener(token_stream, error_handler) + error_handler = pfdl_base_classes.get_class("ErrorHandler")(file_path, used_in_extension) + error_listener = pfdl_base_classes.get_class("SyntaxErrorListener")(token_stream, error_handler) parser.addErrorListener(error_listener) tree = parser.program() if error_handler.has_error() is False: - visitor = PFDLTreeVisitor(error_handler) + visitor = pfdl_base_classes.get_class("PFDLTreeVisitor")(error_handler, pfdl_base_classes) process = visitor.visit(tree) - semantic_error_checker = SemanticErrorChecker(error_handler, process) + semantic_error_checker = 
pfdl_base_classes.get_class("SemanticErrorChecker")( + error_handler, process, pfdl_base_classes + ) semantic_error_checker.validate_process() if error_handler.has_error() is False: @@ -68,7 +66,9 @@ def parse_string( return (False, None) -def parse_program(program: str) -> Tuple[bool, Union[None, Process], str]: +def parse_program( + program: str, pfdl_base_classes: PFDLBaseClasses = PFDLBaseClasses("pfdl_scheduler") +) -> Tuple[bool, Union[None, Process], str]: """Loads the content of the program from either the given path or the PFDL program directly and calls the parse_string function. Args: @@ -79,7 +79,7 @@ def parse_program(program: str) -> Tuple[bool, Union[None, Process], str]: process object if so, otherwise None. """ pfdl_string, file_path = extract_content_and_file_path(program) - return *parse_string(pfdl_string, file_path), pfdl_string + return *parse_string(pfdl_string, file_path, pfdl_base_classes=pfdl_base_classes), pfdl_string def write_tokens_to_file(token_stream: CommonTokenStream) -> None: diff --git a/pfdl_scheduler/validation/semantic_error_checker.py b/pfdl_scheduler/validation/semantic_error_checker.py index 4e5086b..4af670f 100644 --- a/pfdl_scheduler/validation/semantic_error_checker.py +++ b/pfdl_scheduler/validation/semantic_error_checker.py @@ -13,6 +13,7 @@ from antlr4.ParserRuleContext import ParserRuleContext # local sources +from pfdl_scheduler.model.instance import Instance from pfdl_scheduler.model.process import Process from pfdl_scheduler.model.struct import Struct from pfdl_scheduler.model.array import Array @@ -24,6 +25,7 @@ from pfdl_scheduler.model.while_loop import WhileLoop from pfdl_scheduler.model.condition import Condition +from pfdl_scheduler.pfdl_base_classes import PFDLBaseClasses from pfdl_scheduler.validation.error_handler import ErrorHandler from pfdl_scheduler.utils import helpers @@ -46,7 +48,12 @@ class SemanticErrorChecker: structs: A Dict that contains all Struct objects of the given process object. 
""" - def __init__(self, error_handler: ErrorHandler, process: Process) -> None: + def __init__( + self, + error_handler: ErrorHandler, + process: Process, + pfdl_base_classes: PFDLBaseClasses = PFDLBaseClasses(), + ) -> None: """Initialize the object. Args: @@ -57,6 +64,7 @@ def __init__(self, error_handler: ErrorHandler, process: Process) -> None: self.process: Process = process self.tasks: Dict[str, Task] = process.tasks self.structs: Dict[str, Struct] = process.structs + self.pfdl_base_classes: PFDLBaseClasses = pfdl_base_classes def validate_process(self) -> bool: """Starts static semantic checks. @@ -65,7 +73,7 @@ def validate_process(self) -> bool: True, if the process has no errors, otherwise False. """ # use & so all methods will be executed even if a method returns False - return self.check_structs() & self.check_tasks() + return self.check_structs() & self.check_tasks() & self.check_instances() # Struct check def check_structs(self) -> bool: @@ -123,6 +131,130 @@ def check_tasks(self) -> bool: return valid + def check_instances(self) -> bool: + """Executes semantic checks for all Instances. + + Returns: + True if all Instances are valid. + """ + valid = True + for instance in self.process.instances.values(): + struct_name = instance.struct_name + struct = None + if struct_name in self.process.structs: + struct = self.process.structs[struct_name] + + # check first if the corresponding Struct exists + if struct is None: + error_msg = ( + f"The Instance '{instance.name}' refers to a struct that does not exist." + ) + self.error_handler.print_error(error_msg, context=instance.context) + valid = False + else: + # execute checks. 
The second check can only be executed if the previous one succeeded
+                valid = (
+                    self.check_if_instance_attributes_exist_in_struct(struct, instance)
+                    and self.check_if_value_matches_with_defined_type(struct, instance)
+                ) & self.check_if_struct_attributes_are_assigned(struct, instance)
+        return valid
+
+    def check_if_instance_attributes_exist_in_struct(
+        self, struct: Struct, instance: Instance
+    ) -> bool:
+        """Checks if all attributes in the given Instance exist in the corresponding Struct.
+
+        Returns:
+            True if all attributes in the given Instance exist in the corresponding Struct.
+        """
+        valid = True
+        # collect all attributes of the corresponding struct, including attributes of parent structs
+        struct_attributes = set(self.process.structs[struct.name].attributes.keys())
+
+        for attribute_name, attribute_value in instance.attributes.items():
+            # validate all attributes of this instance
+            if not attribute_name in struct_attributes:
+                error_msg = (
+                    f"The attribute '{attribute_name}' in instance '{instance.name}' "
+                    + "was not defined in the corresponding Struct"
+                )
+
+                self.error_handler.print_error(error_msg, context=instance.context)
+                valid = False
+            elif isinstance(attribute_value, self.pfdl_base_classes.get_class("Instance")):
+                # the attribute is an instance so recursively check its attributes
+                nested_struct = self.process.structs[struct.attributes[attribute_name]]
+                if not self.check_if_instance_attributes_exist_in_struct(
+                    nested_struct, attribute_value
+                ):
+                    valid = False
+        return valid
+
+    def check_if_struct_attributes_are_assigned(self, struct: Struct, instance: Instance) -> bool:
+        """Checks if all attributes from the corresponding struct are assigned
+        with values in the instance.
+
+        Returns: True if all attributes of the instance are assigned
+        """
+        valid = True
+
+        struct_attributes = struct.attributes.copy()
+
+        for struct_attribute in struct_attributes:
+            attribute_found = False
+            for attribute_name, attribute_value in instance.attributes.items():
+                if struct_attribute == attribute_name:
+                    attribute_found = True
+
+                    if isinstance(attribute_value, self.pfdl_base_classes.get_class("Instance")):
+                        # the attribute is an instance so recursively check its attributes
+                        nested_struct = self.process.structs[struct.attributes[attribute_name]]
+                        if not self.check_if_struct_attributes_are_assigned(
+                            nested_struct, attribute_value
+                        ):
+                            valid = False
+                    break
+            if attribute_found is False:
+                error_msg = (
+                    f"The attribute '{struct_attribute}' from the corresponding struct was not "
+                    f"defined in instance '{instance.name}'"
+                )
+                self.error_handler.print_error(error_msg, context=instance.context)
+                valid = False
+        return valid
+
+    def check_if_value_matches_with_defined_type(self, struct: Struct, instance: Instance) -> bool:
+        """Checks if the assigned values in the Instance match with the defined type in the Struct.
+
+        Returns:
+            True if all assigned values in the Instance match
+            with the defined type in the Struct.
+ """ + valid = True + for attribute_name, attribute_value in instance.attributes.items(): + struct_attr_type = None + struct_attributes = struct.attributes + + # This method assumes that the attribute exists in the Struct, so no additional check + struct_attr_type = struct_attributes[attribute_name] + + if isinstance(attribute_value, self.pfdl_base_classes.get_class("Instance")): + # the attribute is an instance so recursively check its attributes + nested_struct = self.process.structs[struct_attr_type] + if not self.check_if_value_matches_with_defined_type( + nested_struct, attribute_value + ): + valid = False + + elif not self.check_type_of_value(attribute_value, struct_attr_type): + error_msg = ( + f"The attribute '{attribute_name}' in instance '{instance.name}' has the " + f"wrong type: should be '{struct_attr_type}'." + ) + self.error_handler.print_error(error_msg, context=instance.context) + valid = False + return valid + def check_statements(self, task: Task) -> bool: """Executes semantic checks for all statements in a Task. @@ -145,15 +277,15 @@ def check_statement( Returns: True if the given statement is valid. 
""" - if isinstance(statement, Service): + if isinstance(statement, self.pfdl_base_classes.get_class("Service")): return self.check_service(statement, task) - if isinstance(statement, TaskCall): + if isinstance(statement, self.pfdl_base_classes.get_class("TaskCall")): return self.check_task_call(statement, task) - if isinstance(statement, Parallel): + if isinstance(statement, self.pfdl_base_classes.get_class("Parallel")): return self.check_parallel(statement, task) - if isinstance(statement, WhileLoop): + if isinstance(statement, self.pfdl_base_classes.get_class("WhileLoop")): return self.check_while_loop(statement, task) - if isinstance(statement, CountingLoop): + if isinstance(statement, self.pfdl_base_classes.get_class("CountingLoop")): return self.check_counting_loop(statement, task) return self.check_conditional_statement(statement, task) @@ -268,7 +400,15 @@ def check_if_task_call_matches_with_called_task(self, task_call: TaskCall, task: for i, (identifier, data_type) in enumerate(task_call.output_parameters.items()): variable_in_called_task = called_task.output_parameters[i] if variable_in_called_task in called_task.variables: - type_of_variable = called_task.variables[variable_in_called_task] + + type_of_variable = "" + if isinstance( + called_task.variables[variable_in_called_task], + self.pfdl_base_classes.get_class("Instance"), + ): + type_of_variable = called_task.variables[variable_in_called_task].struct_name + else: + type_of_variable = called_task.variables[variable_in_called_task] if str(type_of_variable) != str(data_type): error_msg = ( @@ -310,7 +450,12 @@ def check_if_input_parameter_matches( """ if isinstance(input_parameter, str): if input_parameter in task_context.variables: - type_of_variable = task_context.variables[input_parameter] + type_of_variable = "" + variable = task_context.variables[input_parameter] + if isinstance(variable, self.pfdl_base_classes.get_class("Instance")): + type_of_variable = variable.struct_name + else: + 
type_of_variable = variable # str() because of possible Arrays as # types (we can compare types by converting Array object to string) @@ -344,7 +489,7 @@ def check_if_input_parameter_matches( i = 1 while i < len(input_parameter) - 1: element = current_struct.attributes[input_parameter[i]] - if isinstance(element, Array): + if isinstance(element, self.pfdl_base_classes.get_class("Array")): i = i + 1 current_struct = self.structs[element.type_of_elements] else: @@ -369,7 +514,7 @@ def check_if_input_parameter_matches( off_symbol_length=len(task_call.name), ) return False - elif isinstance(input_parameter, Struct): + elif isinstance(input_parameter, self.pfdl_base_classes.get_class("Struct")): if input_parameter.name != defined_type: error_msg = ( f"Type of TaskCall parameter '{input_parameter.name}' does not match " @@ -432,7 +577,7 @@ def check_call_input_parameters( valid = True for input_parameter in called_entity.input_parameters: - if isinstance(input_parameter, Struct): + if isinstance(input_parameter, self.pfdl_base_classes.get_class("Instance")): if not self.check_instantiated_struct_attributes(input_parameter): valid = False elif isinstance(input_parameter, list): @@ -458,8 +603,12 @@ def check_attribute_access( True if the attribute access is valid. 
""" variable = variable_list[0] - if variable in task.variables and task.variables[variable] in self.structs: - struct = self.structs[task.variables[variable]] + + if variable in task.variables: + if task.variables[variable].__class__.__name__ == "Instance": + struct = self.structs[task.variables[variable].struct_name] + if task.variables[variable] in self.structs: + struct = self.structs[task.variables[variable]] predecessor = struct for i in range(1, len(variable_list)): attribute = variable_list[i] @@ -511,7 +660,7 @@ def check_call_output_parameters(self, called_entity: Union[Service, TaskCall]) valid = False return valid - def check_instantiated_struct_attributes(self, struct_instance: Struct) -> bool: + def check_instantiated_struct_attributes(self, instance: Instance) -> bool: """Calls multiple check methods to validate an instantiated Struct. Multiple Checks are done: @@ -521,25 +670,26 @@ def check_instantiated_struct_attributes(self, struct_instance: Struct) -> bool: (4) Check if attributes in the instance do not match with attributes in the definition. Args: - struct_instance: The instantiated struct that is checked. + instance: The instantiated struct that is checked. Returns: True if the instantiated Struct is valid. 
""" valid = True - if self.check_if_struct_exists(struct_instance): - struct_definition = self.structs[struct_instance.name] + if self.check_if_struct_exists(instance): + struct_definition = self.structs[instance.name] - if not self.check_for_missing_attribute_in_struct(struct_instance, struct_definition): + if not self.check_for_missing_attribute_in_struct(instance, struct_definition): valid = False - for identifier in struct_instance.attributes: + # Create a copy of the struct instance attributes and remove default attributes + for identifier in instance.attributes: if not ( self.check_for_unknown_attribute_in_struct( - struct_instance, identifier, struct_definition + instance, identifier, struct_definition ) - and self.check_for_wrong_attribute_type_in_struct( - struct_instance, identifier, struct_definition + and self.check_for_wrong_attribute_type_in_instance( + instance, identifier, struct_definition ) ): valid = False @@ -583,7 +733,7 @@ def check_for_unknown_attribute_in_struct( return False return True - def check_for_wrong_attribute_type_in_struct( + def check_for_wrong_attribute_type_in_instance( self, struct_instance: Struct, identifier: str, struct_definition: Struct ) -> bool: """Calls check methods for the attribute assignments in an instantiated Struct. 
@@ -603,12 +753,12 @@ def check_for_wrong_attribute_type_in_struct( if isinstance(correct_attribute_type, str): if correct_attribute_type in self.structs: # check for structs which has structs as attribute - if isinstance(attribute, Struct): + if isinstance(attribute, self.pfdl_base_classes.get_class("Struct")): attribute.name = correct_attribute_type struct_def = self.structs[correct_attribute_type] struct_correct = True for identifier in attribute.attributes: - if not self.check_for_wrong_attribute_type_in_struct( + if not self.check_for_wrong_attribute_type_in_instance( attribute, identifier, struct_def ): struct_correct = False @@ -628,10 +778,10 @@ def check_for_wrong_attribute_type_in_struct( self.error_handler.print_error(error_msg, context=struct_instance.context) return False - elif isinstance(correct_attribute_type, Array): - if not isinstance(attribute, Array) or not self.check_array( - attribute, correct_attribute_type - ): + elif isinstance(correct_attribute_type, self.pfdl_base_classes.get_class("Array")): + if not isinstance( + attribute, self.pfdl_base_classes.get_class("Array") + ) or not self.check_array(attribute, correct_attribute_type): error_msg = ( f"Attribute '{identifier}' has the wrong type in the instantiated" f" Struct '{struct_instance.name}', expected 'Array'" @@ -650,7 +800,7 @@ def check_array(self, instantiated_array: Array, array_definition: Array) -> boo element_type = array_definition.type_of_elements for value in instantiated_array.values: # type of Struct not checked yet - if isinstance(value, Struct): + if isinstance(value, self.pfdl_base_classes.get_class("Struct")): if value.name == "": value.name = array_definition.type_of_elements if not self.check_instantiated_struct_attributes(value): @@ -710,9 +860,11 @@ def check_counting_loop(self, counting_loop: CountingLoop, task: Task) -> bool: True if the Counting Loop statement is valid. 
""" if counting_loop.parallel: - if len(counting_loop.statements) == 1 and isinstance(counting_loop.statements[0], TaskCall): + if len(counting_loop.statements) == 1 and isinstance( + counting_loop.statements[0], self.pfdl_base_classes.get_class("TaskCall") + ): return True - error_msg = "Only a single task is allowed in a parallel loop statement!" + error_msg = "Only a single task call is allowed in a parallel loop statement!" self.error_handler.print_error(error_msg, context=counting_loop.context) return False else: @@ -879,7 +1031,7 @@ def check_if_variable_definition_is_valid( if isinstance(variable_type, str): if not self.variable_type_exists(variable_type): valid = False - elif isinstance(variable_type, Array): + elif isinstance(variable_type, self.pfdl_base_classes.get_class("Array")): element_type = variable_type.type_of_elements if not self.variable_type_exists(element_type): valid = False @@ -936,7 +1088,7 @@ def check_type_of_value(self, value: Any, value_type: str) -> bool: return isinstance(value, bool) if value_type == "string": return isinstance(value, str) - if isinstance(value, Struct): + if isinstance(value, self.pfdl_base_classes.get_class("Struct")): return value.name == value_type # value was a string return True diff --git a/setup.py b/setup.py index 41a8933..16e53af 100644 --- a/setup.py +++ b/setup.py @@ -11,7 +11,7 @@ setup( name="pfdl_scheduler", - version="0.9.0", + version="0.9.2", description="Parser and Scheduler for Production Flow Description Language (PFDL) files.", long_description=long_description, long_description_content_type='text/markdown', diff --git a/tests/unit_test/model/test_condition.py b/tests/unit_test/model/test_condition.py index 507d190..7425caa 100644 --- a/tests/unit_test/model/test_condition.py +++ b/tests/unit_test/model/test_condition.py @@ -32,7 +32,9 @@ def test_init(self): context = ParserRuleContext() condition = Condition({"a": 1}, [Service("service")], [TaskCall("task")], context=context) 
self.assertEqual(condition.expression, {"a": 1}) - self.assertEqual(condition.passed_stmts, [Service("service")]) + self.assertEqual(1, len(condition.passed_stmts)) + self.assertIsInstance(condition.passed_stmts[0], Service) + self.assertEqual("service", condition.passed_stmts[0].name) self.assertEqual(condition.failed_stmts, [TaskCall("task")]) self.assertEqual(condition.context, context) self.assertEqual(condition.context_dict, {}) diff --git a/tests/unit_test/model/test_struct.py b/tests/unit_test/model/test_struct.py index 392dfb5..283b98e 100644 --- a/tests/unit_test/model/test_struct.py +++ b/tests/unit_test/model/test_struct.py @@ -71,7 +71,9 @@ def test_struct_from_json(self): context = ParserRuleContext() json_string = '{"attr1": "value1", "attr2": [1, 2, 3], "attr3": {"attr4": "value4"}}' - struct = Struct.from_json(json_string, ErrorHandler("", False), context) + struct = Struct.from_json( + json_string, ErrorHandler("", False), context, struct_class=Struct + ) self.assertEqual(struct.name, "") attributes = { @@ -88,7 +90,7 @@ def test_parse_json(self): context = ParserRuleContext() # empty - struct = parse_json({}, ErrorHandler("", False), context) + struct = parse_json({}, ErrorHandler("", False), context, Struct) self.assertEqual(struct.name, "") self.assertEqual(struct.attributes, {}) self.assertEqual(struct.context, context) @@ -97,7 +99,10 @@ def test_parse_json(self): # simple attributes attributes = {"attr1": "value1", "attr2": 123, "attr3": True} struct = parse_json( - {"attr1": "value1", "attr2": 123, "attr3": True}, ErrorHandler("", False), context + {"attr1": "value1", "attr2": 123, "attr3": True}, + ErrorHandler("", False), + context, + Struct, ) self.assertEqual(struct.name, "") @@ -114,7 +119,7 @@ def test_parse_json(self): "attr7": {"attr8": {"attr9": True}}, "attr10": [True, True, False], } - struct = parse_json(struct_dict, ErrorHandler("", False), None) + struct = parse_json(struct_dict, ErrorHandler("", False), None, Struct) 
self.assertEqual(struct.name, "") self.assertEqual( struct.attributes, @@ -144,7 +149,7 @@ def test_parse_json(self): }, ] } - struct = parse_json(struct_dict, ErrorHandler("", False), None) + struct = parse_json(struct_dict, ErrorHandler("", False), None, Struct) self.assertEqual(struct.name, "") array = Array("", [Struct("", {"attr2": 5}), Struct("", {"attr3": "string"})]) self.assertEqual(struct.attributes, {"attr1": array}) diff --git a/tests/unit_test/model/test_task.py b/tests/unit_test/model/test_task.py index 69f046d..7311aad 100644 --- a/tests/unit_test/model/test_task.py +++ b/tests/unit_test/model/test_task.py @@ -42,10 +42,10 @@ def test_init(self): ) self.assertEqual(task.name, "task1") self.assertEqual(len(task.statements), 4) - self.assertEqual(task.statements[0], Service()) + self.assertIsInstance(task.statements[0], Service) self.assertEqual(task.statements[1], CountingLoop()) self.assertEqual(task.statements[2], CountingLoop()) - self.assertEqual(task.statements[3], Service()) + self.assertIsInstance(task.statements[3], Service) self.assertEqual(task.variables, {"var1": "val1", "var2": "val2"}) self.assertEqual(task.input_parameters, {"in1": "val4", "in2": "val5"}) self.assertEqual(task.output_parameters, ["out1", "out2"]) diff --git a/tests/unit_test/test_pfdl_base_classes.py b/tests/unit_test/test_pfdl_base_classes.py new file mode 100644 index 0000000..e66608d --- /dev/null +++ b/tests/unit_test/test_pfdl_base_classes.py @@ -0,0 +1,94 @@ +# Copyright The PFDL Contributors +# +# Licensed under the MIT License. +# For details on the licensing terms, see the LICENSE file. 
+# SPDX-License-Identifier: MIT + +"""Contains unit tests for the PFDLBaseClasses class.""" + +import unittest +from unittest.mock import patch, MagicMock +from pfdl_scheduler.pfdl_base_classes import PFDLBaseClasses + + +class TestPFDLBaseClasses(unittest.TestCase): + """Test the PFDLBaseClasses class.""" + + def test_register_class(self): + pfdl_base_classes = PFDLBaseClasses() + mock_class = type("MockClass", (object,), {}) + + pfdl_base_classes.register_class("MockClass", mock_class) + + # Verify that the class is registered + self.assertEqual(pfdl_base_classes.get_class("MockClass"), mock_class) + + def test_get_class_default(self): + pfdl_base_classes = PFDLBaseClasses() + mock_class = type("MockClass", (object,), {}) + + # Simulate default class registration + pfdl_base_classes._default_classes["MockClass"] = "module.MockClass" + + with patch( + "pfdl_scheduler.pfdl_base_classes.importlib.import_module" + ) as mock_import_module: + mock_import_module.return_value = MagicMock(MockClass=mock_class) + + # Verify that the default class can be retrieved + self.assertEqual(pfdl_base_classes.get_class("MockClass"), mock_class) + + def test_get_instance(self): + pfdl_base_classes = PFDLBaseClasses() + mock_class = type( + "MockClass", (object,), {"__init__": lambda self, x: setattr(self, "x", x)} + ) + + pfdl_base_classes.register_class("MockClass", mock_class) + + # Verify instance creation and reuse + instance = pfdl_base_classes.get_instance("MockClass", 5) + self.assertEqual(instance.x, 5) + instance2 = pfdl_base_classes.get_instance("MockClass") + self.assertIs(instance, instance2) + + def test_clear_registry(self): + pfdl_base_classes = PFDLBaseClasses() + mock_class = type("MockClass", (object,), {}) + + pfdl_base_classes.register_class("MockClass", mock_class) + pfdl_base_classes.get_instance("MockClass") + + # Clear registry and instances + pfdl_base_classes.clear_registry() + + # Verify that both registry and instances are cleared + 
self.assertNotIn("MockClass", pfdl_base_classes._class_registry) + self.assertNotIn("MockClass", pfdl_base_classes._class_instances) + + def test_module_name_from_path(self): + pfdl_base_classes = PFDLBaseClasses() + module_name = pfdl_base_classes._module_name_from_path("fake_dir/module.py") + self.assertEqual(module_name, "fake_dir.module") + + pfdl_base_classes = PFDLBaseClasses() + module_name = pfdl_base_classes._module_name_from_path("/fake_dir/module.py") + self.assertEqual(module_name, "fake_dir.module") + + @patch("pfdl_scheduler.pfdl_base_classes.importlib.import_module") + def test_get_default_class(self, mock_import_module): + pfdl_base_classes = PFDLBaseClasses() + mock_class = type("MockClass", (object,), {}) + pfdl_base_classes._default_classes["MockClass"] = "module.MockClass" + + mock_import_module.return_value = MagicMock(MockClass=mock_class) + + # Verify default class retrieval + self.assertEqual(pfdl_base_classes._get_default_class("MockClass"), mock_class) + + def test_get_default_class_nonexistent(self): + pfdl_base_classes = PFDLBaseClasses() + with self.assertRaises(ValueError) as context: + pfdl_base_classes._get_default_class("NonExistentClass") + + self.assertEqual(str(context.exception), "Default class for 'NonExistentClass' not found.") diff --git a/tests/unit_test/test_pfdl_tree_visitor.py b/tests/unit_test/test_pfdl_tree_visitor.py index 6ad61ba..3f3f830 100644 --- a/tests/unit_test/test_pfdl_tree_visitor.py +++ b/tests/unit_test/test_pfdl_tree_visitor.py @@ -14,6 +14,7 @@ import unittest from unittest.mock import MagicMock from unittest.mock import patch +from pfdl_scheduler.model.instance import Instance from pfdl_scheduler.model.parallel import Parallel @@ -145,8 +146,8 @@ def test_visit_struct(self): def test_visit_task(self): task_in_context = PFDLParser.Task_inContext(None) task_out_context = PFDLParser.Task_outContext(None) - statement_context_1 = PFDLParser.StatementContext(None) - statement_context_2 = 
PFDLParser.StatementContext(None) + statement_context_1 = PFDLParser.TaskStatementContext(None) + statement_context_2 = PFDLParser.TaskStatementContext(None) task_context = PFDLParser.TaskContext(None) task_context.children = [ @@ -175,7 +176,7 @@ def test_visit_task(self): ) as mock_2: with patch.object( self.visitor, - "visitStatement", + "visitTaskStatement", MagicMock(side_effect=[statement_1, statement_2]), ) as mock_3: task = self.visitor.visitTask(task_context) @@ -944,6 +945,38 @@ def test_visitUnOperation(self): un_op = self.visitor.visitUnOperation(un_op_context) self.assertEqual(un_op, "!") + def test_visitInstance(self): + instance_context = PFDLParser.InstanceContext(None) + + instance_context.children = [ + PFDLParser.Struct_idContext(None), + PFDLParser.Attribute_assignmentContext(None), + PFDLParser.Attribute_assignmentContext(None), + ] + create_and_add_token(PFDLParser.STARTS_WITH_LOWER_C_STR, "instance_id", instance_context) + with patch.object( + PFDLTreeVisitor, + "visitStruct_id", + MagicMock(side_effect=["struct_id"]), + ): + with patch.object( + PFDLTreeVisitor, + "visitAttribute_assignment", + MagicMock(side_effect=[("attr", "value"), ("attr_2", {"id": "value"})]), + ): + with patch.object( + Instance, + "from_json", + MagicMock(side_effect=[Instance(attributes={"id": "value"})]), + ): + instance = self.visitor.visitInstance(instance_context) + + self.assertIsNotNone(instance) + self.assertEqual(instance.name, "instance_id") + self.assertEqual(instance.struct_name, "struct_id") + self.assertEqual(len(instance.attributes), 2) + self.assertTrue(isinstance(instance.attributes["attr_2"], Instance)) + def create_and_add_token( token_type: int, token_text: str, antlr_context: ParserRuleContext diff --git a/tests/unit_test/test_plugin_loader.py b/tests/unit_test/test_plugin_loader.py new file mode 100644 index 0000000..4ea7ce8 --- /dev/null +++ b/tests/unit_test/test_plugin_loader.py @@ -0,0 +1,39 @@ +# Copyright The PFDL Contributors +# +# 
Licensed under the MIT License. +# For details on the licensing terms, see the LICENSE file. +# SPDX-License-Identifier: MIT + +"""Contains unit tests for the PluginLoader class.""" + +import unittest +import os +from unittest.mock import patch, MagicMock +from pfdl_scheduler.plugins.plugin_loader import PluginLoader, base_class, apply_plugin_to_base + + +class TestPluginLoader(unittest.TestCase): + """Test the PluginLoader class.""" + + def test_apply_plugin_to_base(self): + class BaseClass: + def method(self): + return "base" + + @base_class("BaseClass") + class PluginClass: + def method(self): + return "plugin" + + combined_class = apply_plugin_to_base(BaseClass, PluginClass) + instance = combined_class() + + # Check if method is overridden correctly + self.assertEqual(instance.method(), "plugin") + + def test_class_name_to_property_name(self): + plugin_loader = PluginLoader() + property_name = plugin_loader._class_name_to_property_name("TestClassName") + + # Check if conversion is correct + self.assertEqual(property_name, "test_class_name_class") diff --git a/tests/unit_test/test_semantic_error_checker.py b/tests/unit_test/test_semantic_error_checker.py index 607d128..d2c02e8 100644 --- a/tests/unit_test/test_semantic_error_checker.py +++ b/tests/unit_test/test_semantic_error_checker.py @@ -16,7 +16,8 @@ from typing import Dict from pfdl_scheduler.model.condition import Condition import unittest -from unittest.mock import Mock, patch +from unittest.mock import MagicMock, Mock, patch +from pfdl_scheduler.model.instance import Instance from pfdl_scheduler.model.parallel import Parallel # local sources @@ -233,6 +234,96 @@ def test_check_tasks(self): self.check_if_print_error_is_called(self.execute_check_tasks, False, False, False) + def test_check_instances(self): + empty_instances_valid = self.checker.check_instances() + self.assertTrue(empty_instances_valid) + + test_instance = Instance("testInstance", struct_name="TestStruct") + test_struct = 
Struct("TestStruct") + self.process.instances = {"testInstance": test_instance} + self.process.structs = {"TestStruct": test_struct} + + # test valid case + with patch.object( + SemanticErrorChecker, + "check_if_instance_attributes_exist_in_struct", + MagicMock(side_effect=[True]), + ): + with patch.object( + SemanticErrorChecker, + "check_if_value_matches_with_defined_type", + MagicMock(side_effect=[True]), + ): + with patch.object( + SemanticErrorChecker, + "check_if_struct_attributes_are_assigned", + MagicMock(side_effect=[True]), + ): + is_instance_valid = self.checker.check_instances() + + self.assertTrue(is_instance_valid) + + # test invalid cases + with patch.object( + SemanticErrorChecker, + "check_if_instance_attributes_exist_in_struct", + MagicMock(side_effect=[False]), + ): + with patch.object( + SemanticErrorChecker, + "check_if_value_matches_with_defined_type", + MagicMock(side_effect=[True]), + ) as value_matches_mock: + with patch.object( + SemanticErrorChecker, + "check_if_struct_attributes_are_assigned", + MagicMock(side_effect=[True]), + ) as struct_attributes_assigned_mock: + is_instance_valid = self.checker.check_instances() + + self.assertFalse(is_instance_valid) + value_matches_mock.assert_not_called() + struct_attributes_assigned_mock.assert_called() + + with patch.object( + SemanticErrorChecker, + "check_if_instance_attributes_exist_in_struct", + MagicMock(side_effect=[True]), + ): + with patch.object( + SemanticErrorChecker, + "check_if_value_matches_with_defined_type", + MagicMock(side_effect=[False]), + ) as value_matches_mock: + with patch.object( + SemanticErrorChecker, + "check_if_struct_attributes_are_assigned", + MagicMock(side_effect=[True]), + ) as struct_attributes_assigned_mock: + is_instance_valid = self.checker.check_instances() + + self.assertFalse(is_instance_valid) + struct_attributes_assigned_mock.assert_called() + + with patch.object( + SemanticErrorChecker, + "check_if_instance_attributes_exist_in_struct", + 
MagicMock(side_effect=[True]), + ): + with patch.object( + SemanticErrorChecker, + "check_if_value_matches_with_defined_type", + MagicMock(side_effect=[True]), + ) as value_matches_mock: + with patch.object( + SemanticErrorChecker, + "check_if_struct_attributes_are_assigned", + MagicMock(side_effect=[False]), + ) as struct_attributes_assigned_mock: + is_instance_valid = self.checker.check_instances() + + self.assertFalse(is_instance_valid) + def test_check_statements(self): dummy_task = Task() dummy_task.statements = [Service()] @@ -797,8 +888,8 @@ def test_check_call_input_parameters(self): self.assertTrue(self.checker.check_call_input_parameters(service, task_context)) self.assertTrue(self.checker.check_call_input_parameters(task_call, task_context)) - # input parameter is struct - service.input_parameters = [Struct()] + # input parameter is instance + service.input_parameters = [Instance()] args = ( "check_instantiated_struct_attributes", @@ -820,7 +911,7 @@ def test_check_call_input_parameters(self): ) self.assertFalse(self.check_method(*args)) - task_call.input_parameters = [Struct()] + task_call.input_parameters = [Instance()] args = ( "check_instantiated_struct_attributes", True, @@ -841,7 +932,7 @@ def test_check_call_input_parameters(self): ) self.assertFalse(self.check_method(*args)) - service.input_parameters = [Struct(), Struct(), Struct()] + service.input_parameters = [Instance(), Instance(), Instance()] args = ( "check_instantiated_struct_attributes", True, @@ -921,12 +1012,12 @@ def test_check_call_input_parameters(self): self.checker.check_call_input_parameters, service, task_context ) - task_context.variables = {"test": Struct()} + task_context.variables = {"test": Instance()} self.assertTrue(self.checker.check_call_input_parameters(service, task_context)) self.assertTrue(self.checker.check_call_input_parameters(task_call, task_context)) # mix of input parameters - service.input_parameters = ["test", [], Struct()] + service.input_parameters = 
["test", [], Instance()] with patch.object( self.checker, "check_instantiated_struct_attributes", return_value=True ) as mock_1: @@ -1132,7 +1223,7 @@ def test_check_instantiated_struct_attributes(self): ) mock_3.assert_called_once() with patch.object( - self.checker, "check_for_wrong_attribute_type_in_struct", return_value=True + self.checker, "check_for_wrong_attribute_type_in_instance", return_value=True ) as mock_3: self.assertTrue( self.checker.check_instantiated_struct_attributes(instantiated_struct) @@ -1153,7 +1244,7 @@ def test_check_instantiated_struct_attributes(self): ) self.assertEqual(mock_3.call_count, 3) with patch.object( - self.checker, "check_for_wrong_attribute_type_in_struct", return_value=False + self.checker, "check_for_wrong_attribute_type_in_instance", return_value=False ) as mock_3: self.assertFalse( self.checker.check_instantiated_struct_attributes(instantiated_struct) @@ -1219,7 +1310,7 @@ def test_check_for_unknown_attribute_in_struct(self): struct_definition, ) - def test_check_for_wrong_attribute_type_in_struct(self): + def test_check_for_wrong_attribute_type_in_instance(self): struct_definition = Struct() struct_definition.name = "Test" instantiated_struct = Struct() @@ -1228,54 +1319,54 @@ def test_check_for_wrong_attribute_type_in_struct(self): # type is string struct_definition.attributes = {"identifier_1": "string"} instantiated_struct.attributes = {"identifier_1": "a string"} - check_result = self.checker.check_for_wrong_attribute_type_in_struct( + check_result = self.checker.check_for_wrong_attribute_type_in_instance( instantiated_struct, "identifier_1", struct_definition ) self.assertTrue(check_result) instantiated_struct.attributes = {"identifier_1": 5} - check_result = self.checker.check_for_wrong_attribute_type_in_struct( + check_result = self.checker.check_for_wrong_attribute_type_in_instance( instantiated_struct, "identifier_1", struct_definition ) self.assertFalse(check_result) self.check_if_print_error_is_called( - 
self.checker.check_for_wrong_attribute_type_in_struct, + self.checker.check_for_wrong_attribute_type_in_instance, instantiated_struct, "identifier_1", struct_definition, ) instantiated_struct.attributes = {"identifier_1": Array()} - check_result = self.checker.check_for_wrong_attribute_type_in_struct( + check_result = self.checker.check_for_wrong_attribute_type_in_instance( instantiated_struct, "identifier_1", struct_definition ) self.assertFalse(check_result) self.check_if_print_error_is_called( - self.checker.check_for_wrong_attribute_type_in_struct, + self.checker.check_for_wrong_attribute_type_in_instance, instantiated_struct, "identifier_1", struct_definition, ) instantiated_struct.attributes = {"identifier_1": True} - check_result = self.checker.check_for_wrong_attribute_type_in_struct( + check_result = self.checker.check_for_wrong_attribute_type_in_instance( instantiated_struct, "identifier_1", struct_definition ) self.assertFalse(check_result) self.check_if_print_error_is_called( - self.checker.check_for_wrong_attribute_type_in_struct, + self.checker.check_for_wrong_attribute_type_in_instance, instantiated_struct, "identifier_1", struct_definition, ) instantiated_struct.attributes = {"identifier_1": Struct()} - check_result = self.checker.check_for_wrong_attribute_type_in_struct( + check_result = self.checker.check_for_wrong_attribute_type_in_instance( instantiated_struct, "identifier_1", struct_definition ) self.assertFalse(check_result) self.check_if_print_error_is_called( - self.checker.check_for_wrong_attribute_type_in_struct, + self.checker.check_for_wrong_attribute_type_in_instance, instantiated_struct, "identifier_1", struct_definition, @@ -1284,54 +1375,54 @@ def test_check_for_wrong_attribute_type_in_struct(self): # type is number struct_definition.attributes = {"identifier_1": "number"} instantiated_struct.attributes = {"identifier_1": 5} - check_result = self.checker.check_for_wrong_attribute_type_in_struct( + check_result = 
self.checker.check_for_wrong_attribute_type_in_instance( instantiated_struct, "identifier_1", struct_definition ) self.assertTrue(check_result) instantiated_struct.attributes = {"identifier_1": "a string"} - check_result = self.checker.check_for_wrong_attribute_type_in_struct( + check_result = self.checker.check_for_wrong_attribute_type_in_instance( instantiated_struct, "identifier_1", struct_definition ) self.assertFalse(check_result) self.check_if_print_error_is_called( - self.checker.check_for_wrong_attribute_type_in_struct, + self.checker.check_for_wrong_attribute_type_in_instance, instantiated_struct, "identifier_1", struct_definition, ) instantiated_struct.attributes = {"identifier_1": Array()} - check_result = self.checker.check_for_wrong_attribute_type_in_struct( + check_result = self.checker.check_for_wrong_attribute_type_in_instance( instantiated_struct, "identifier_1", struct_definition ) self.assertFalse(check_result) self.check_if_print_error_is_called( - self.checker.check_for_wrong_attribute_type_in_struct, + self.checker.check_for_wrong_attribute_type_in_instance, instantiated_struct, "identifier_1", struct_definition, ) instantiated_struct.attributes = {"identifier_1": True} - check_result = self.checker.check_for_wrong_attribute_type_in_struct( + check_result = self.checker.check_for_wrong_attribute_type_in_instance( instantiated_struct, "identifier_1", struct_definition ) self.assertFalse(check_result) self.check_if_print_error_is_called( - self.checker.check_for_wrong_attribute_type_in_struct, + self.checker.check_for_wrong_attribute_type_in_instance, instantiated_struct, "identifier_1", struct_definition, ) instantiated_struct.attributes = {"identifier_1": Struct()} - check_result = self.checker.check_for_wrong_attribute_type_in_struct( + check_result = self.checker.check_for_wrong_attribute_type_in_instance( instantiated_struct, "identifier_1", struct_definition ) self.assertFalse(check_result) self.check_if_print_error_is_called( - 
self.checker.check_for_wrong_attribute_type_in_struct, + self.checker.check_for_wrong_attribute_type_in_instance, instantiated_struct, "identifier_1", struct_definition, @@ -1340,54 +1431,54 @@ def test_check_for_wrong_attribute_type_in_struct(self): # type is boolean struct_definition.attributes = {"identifier_1": "boolean"} instantiated_struct.attributes = {"identifier_1": True} - check_result = self.checker.check_for_wrong_attribute_type_in_struct( + check_result = self.checker.check_for_wrong_attribute_type_in_instance( instantiated_struct, "identifier_1", struct_definition ) self.assertTrue(check_result) instantiated_struct.attributes = {"identifier_1": "a string"} - check_result = self.checker.check_for_wrong_attribute_type_in_struct( + check_result = self.checker.check_for_wrong_attribute_type_in_instance( instantiated_struct, "identifier_1", struct_definition ) self.assertFalse(check_result) self.check_if_print_error_is_called( - self.checker.check_for_wrong_attribute_type_in_struct, + self.checker.check_for_wrong_attribute_type_in_instance, instantiated_struct, "identifier_1", struct_definition, ) instantiated_struct.attributes = {"identifier_1": Array()} - check_result = self.checker.check_for_wrong_attribute_type_in_struct( + check_result = self.checker.check_for_wrong_attribute_type_in_instance( instantiated_struct, "identifier_1", struct_definition ) self.assertFalse(check_result) self.check_if_print_error_is_called( - self.checker.check_for_wrong_attribute_type_in_struct, + self.checker.check_for_wrong_attribute_type_in_instance, instantiated_struct, "identifier_1", struct_definition, ) instantiated_struct.attributes = {"identifier_1": 5} - check_result = self.checker.check_for_wrong_attribute_type_in_struct( + check_result = self.checker.check_for_wrong_attribute_type_in_instance( instantiated_struct, "identifier_1", struct_definition ) self.assertFalse(check_result) self.check_if_print_error_is_called( - 
self.checker.check_for_wrong_attribute_type_in_struct, + self.checker.check_for_wrong_attribute_type_in_instance, instantiated_struct, "identifier_1", struct_definition, ) instantiated_struct.attributes = {"identifier_1": Struct()} - check_result = self.checker.check_for_wrong_attribute_type_in_struct( + check_result = self.checker.check_for_wrong_attribute_type_in_instance( instantiated_struct, "identifier_1", struct_definition ) self.assertFalse(check_result) self.check_if_print_error_is_called( - self.checker.check_for_wrong_attribute_type_in_struct, + self.checker.check_for_wrong_attribute_type_in_instance, instantiated_struct, "identifier_1", struct_definition, @@ -1396,54 +1487,54 @@ def test_check_for_wrong_attribute_type_in_struct(self): # type is Array struct_definition.attributes = {"identifier_1": Array()} instantiated_struct.attributes = {"identifier_1": Array()} - check_result = self.checker.check_for_wrong_attribute_type_in_struct( + check_result = self.checker.check_for_wrong_attribute_type_in_instance( instantiated_struct, "identifier_1", struct_definition ) self.assertTrue(check_result) instantiated_struct.attributes = {"identifier_1": "a string"} - check_result = self.checker.check_for_wrong_attribute_type_in_struct( + check_result = self.checker.check_for_wrong_attribute_type_in_instance( instantiated_struct, "identifier_1", struct_definition ) self.assertFalse(check_result) self.check_if_print_error_is_called( - self.checker.check_for_wrong_attribute_type_in_struct, + self.checker.check_for_wrong_attribute_type_in_instance, instantiated_struct, "identifier_1", struct_definition, ) instantiated_struct.attributes = {"identifier_1": 5} - check_result = self.checker.check_for_wrong_attribute_type_in_struct( + check_result = self.checker.check_for_wrong_attribute_type_in_instance( instantiated_struct, "identifier_1", struct_definition ) self.assertFalse(check_result) self.check_if_print_error_is_called( - 
self.checker.check_for_wrong_attribute_type_in_struct, + self.checker.check_for_wrong_attribute_type_in_instance, instantiated_struct, "identifier_1", struct_definition, ) instantiated_struct.attributes = {"identifier_1": True} - check_result = self.checker.check_for_wrong_attribute_type_in_struct( + check_result = self.checker.check_for_wrong_attribute_type_in_instance( instantiated_struct, "identifier_1", struct_definition ) self.assertFalse(check_result) self.check_if_print_error_is_called( - self.checker.check_for_wrong_attribute_type_in_struct, + self.checker.check_for_wrong_attribute_type_in_instance, instantiated_struct, "identifier_1", struct_definition, ) instantiated_struct.attributes = {"identifier_1": Struct()} - check_result = self.checker.check_for_wrong_attribute_type_in_struct( + check_result = self.checker.check_for_wrong_attribute_type_in_instance( instantiated_struct, "identifier_1", struct_definition ) self.assertFalse(check_result) self.check_if_print_error_is_called( - self.checker.check_for_wrong_attribute_type_in_struct, + self.checker.check_for_wrong_attribute_type_in_instance, instantiated_struct, "identifier_1", struct_definition, @@ -1468,55 +1559,55 @@ def test_check_for_wrong_attribute_type_in_struct(self): self.process.structs["NestedStruct_1"] = struct_definition_2 self.process.structs["NestedStruct_2"] = struct_definition_3 - check_result = self.checker.check_for_wrong_attribute_type_in_struct( + check_result = self.checker.check_for_wrong_attribute_type_in_instance( instantiated_struct, "nested_struct", struct_definition ) self.assertTrue(check_result) instantiated_struct.attributes = {"nested_struct": "not_a_struct_name"} - check_result = self.checker.check_for_wrong_attribute_type_in_struct( + check_result = self.checker.check_for_wrong_attribute_type_in_instance( instantiated_struct, "nested_struct", struct_definition ) self.assertFalse(check_result) self.check_if_print_error_is_called( - 
self.checker.check_for_wrong_attribute_type_in_struct, + self.checker.check_for_wrong_attribute_type_in_instance, instantiated_struct, "nested_struct", struct_definition, ) instantiated_struct.attributes = {"nested_struct": Array()} - check_result = self.checker.check_for_wrong_attribute_type_in_struct( + check_result = self.checker.check_for_wrong_attribute_type_in_instance( instantiated_struct, "nested_struct", struct_definition ) self.assertFalse(check_result) self.check_if_print_error_is_called( - self.checker.check_for_wrong_attribute_type_in_struct, + self.checker.check_for_wrong_attribute_type_in_instance, instantiated_struct, "nested_struct", struct_definition, ) instantiated_struct.attributes = {"nested_struct": True} - check_result = self.checker.check_for_wrong_attribute_type_in_struct( + check_result = self.checker.check_for_wrong_attribute_type_in_instance( instantiated_struct, "nested_struct", struct_definition ) self.assertFalse(check_result) self.check_if_print_error_is_called( - self.checker.check_for_wrong_attribute_type_in_struct, + self.checker.check_for_wrong_attribute_type_in_instance, instantiated_struct, "nested_struct", struct_definition, ) instantiated_struct.attributes = {"nested_struct": 5} - check_result = self.checker.check_for_wrong_attribute_type_in_struct( + check_result = self.checker.check_for_wrong_attribute_type_in_instance( instantiated_struct, "nested_struct", struct_definition ) self.assertFalse(check_result) self.check_if_print_error_is_called( - self.checker.check_for_wrong_attribute_type_in_struct, + self.checker.check_for_wrong_attribute_type_in_instance, instantiated_struct, "nested_struct", struct_definition, @@ -1524,10 +1615,10 @@ def test_check_for_wrong_attribute_type_in_struct(self): instantiated_struct.attributes = {"nested_struct": nested_struct} args = ( - "check_for_wrong_attribute_type_in_struct", + "check_for_wrong_attribute_type_in_instance", True, 1, - self.checker.check_for_wrong_attribute_type_in_struct, 
+ self.checker.check_for_wrong_attribute_type_in_instance, instantiated_struct, "nested_struct", struct_definition, @@ -1535,10 +1626,10 @@ def test_check_for_wrong_attribute_type_in_struct(self): self.assertTrue(self.check_method(*args)) args = ( - "check_for_wrong_attribute_type_in_struct", + "check_for_wrong_attribute_type_in_instance", False, 1, - self.checker.check_for_wrong_attribute_type_in_struct, + self.checker.check_for_wrong_attribute_type_in_instance, instantiated_struct, "nested_struct", struct_definition,