class SparkJob(Spark):
    """Description"""

    type_name: str = Field(default="SparkJob", allow_mutation=False)

    @validator("type_name")
    def validate_type_name(cls, v):
        if v != "SparkJob":
            raise ValueError("must be SparkJob")
        return v

    def __setattr__(self, name, value):
        if name in SparkJob._convenience_properties:
            return object.__setattr__(self, name, value)
        super().__setattr__(name, value)

    SPARK_APP_NAME: ClassVar[KeywordTextField] = KeywordTextField(
        "sparkAppName", "sparkAppName.keyword", "sparkAppName"
    )
    """
    Name of the Spark app containing this Spark Job, e.g. extract_raw_data.
    """
    SPARK_MASTER: ClassVar[KeywordField] = KeywordField("sparkMaster", "sparkMaster")
    """
    The Spark master URL, e.g. local, local[4], or spark://master:7077.
    """

    OUTPUTS: ClassVar[RelationField] = RelationField("outputs")
    """
    TBC
    """
    INPUTS: ClassVar[RelationField] = RelationField("inputs")
    """
    TBC
    """
    PROCESS: ClassVar[RelationField] = RelationField("process")
    """
    TBC
    """

    _convenience_properties: ClassVar[List[str]] = [
        "spark_app_name",
        "spark_master",
        "outputs",
        "inputs",
        "process",
    ]

    @property
    def spark_app_name(self) -> Optional[str]:
        return None if self.attributes is None else self.attributes.spark_app_name

    @spark_app_name.setter
    def spark_app_name(self, spark_app_name: Optional[str]):
        if self.attributes is None:
            self.attributes = self.Attributes()
        self.attributes.spark_app_name = spark_app_name

    @property
    def spark_master(self) -> Optional[str]:
        return None if self.attributes is None else self.attributes.spark_master

    @spark_master.setter
    def spark_master(self, spark_master: Optional[str]):
        if self.attributes is None:
            self.attributes = self.Attributes()
        self.attributes.spark_master = spark_master

    @property
    def outputs(self) -> Optional[List[Catalog]]:
        return None if self.attributes is None else self.attributes.outputs

    @outputs.setter
    def outputs(self, outputs: Optional[List[Catalog]]):
        if self.attributes is None:
            self.attributes = self.Attributes()
        self.attributes.outputs = outputs

    @property
    def inputs(self) -> Optional[List[Catalog]]:
        return None if self.attributes is None else self.attributes.inputs

    @inputs.setter
    def inputs(self, inputs: Optional[List[Catalog]]):
        if self.attributes is None:
            self.attributes = self.Attributes()
        self.attributes.inputs = inputs

    @property
    def process(self) -> Optional[Process]:
        return None if self.attributes is None else self.attributes.process

    @process.setter
    def process(self, process: Optional[Process]):
        if self.attributes is None:
            self.attributes = self.Attributes()
        self.attributes.process = process

    class Attributes(Spark.Attributes):
        spark_app_name: Optional[str] = Field(default=None, description="")
        spark_master: Optional[str] = Field(default=None, description="")
        outputs: Optional[List[Catalog]] = Field(default=None, description="")  # relationship
        inputs: Optional[List[Catalog]] = Field(default=None, description="")  # relationship
        process: Optional[Process] = Field(default=None, description="")  # relationship

    attributes: SparkJob.Attributes = Field(
        default_factory=lambda: SparkJob.Attributes(),
        description=(
            "Map of attributes in the instance and their values. "
            "The specific keys of this map will vary by type, "
            "so are described in the sub-types of this schema."
        ),
    )
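

# ---------------------------------------------------------------------------
# Usage sketch (illustrative only, not part of the generated model above).
# It assumes the class is importable as part of the SDK's asset models and
# that a bare SparkJob() constructor is valid for local, unsaved instances;
# adjust the import path and construction to match your SDK version.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    job = SparkJob()

    # The convenience setters route through __setattr__ above and store the
    # values on the nested SparkJob.Attributes instance.
    job.spark_app_name = "extract_raw_data"
    job.spark_master = "spark://master:7077"

    print(job.spark_app_name)           # -> "extract_raw_data"
    print(job.attributes.spark_master)  # -> "spark://master:7077"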