class Spark(Catalog):
    """Description"""

    type_name: str = Field(default="Spark", allow_mutation=False)

    @validator("type_name")
    def validate_type_name(cls, v):
        if v != "Spark":
            raise ValueError("must be Spark")
        return v

    def __setattr__(self, name, value):
        if name in Spark._convenience_properties:
            return object.__setattr__(self, name, value)
        super().__setattr__(name, value)

    SPARK_RUN_VERSION: ClassVar[KeywordField] = KeywordField(
        "sparkRunVersion", "sparkRunVersion"
    )
    """
    Spark Version for the Spark Job run eg. 3.4.1
    """
    SPARK_RUN_OPEN_LINEAGE_VERSION: ClassVar[KeywordField] = KeywordField(
        "sparkRunOpenLineageVersion", "sparkRunOpenLineageVersion"
    )
    """
    OpenLineage Version of the Spark Job run eg. 1.1.0
    """
    SPARK_RUN_START_TIME: ClassVar[NumericField] = NumericField(
        "sparkRunStartTime", "sparkRunStartTime"
    )
    """
    Start time of the Spark Job eg. 1695673598218
    """
    SPARK_RUN_END_TIME: ClassVar[NumericField] = NumericField(
        "sparkRunEndTime", "sparkRunEndTime"
    )
    """
    End time of the Spark Job eg. 1695673598218
    """
    SPARK_RUN_OPEN_LINEAGE_STATE: ClassVar[KeywordField] = KeywordField(
        "sparkRunOpenLineageState", "sparkRunOpenLineageState"
    )
    """
    OpenLineage state of the Spark Job run eg. COMPLETE
    """

    _convenience_properties: ClassVar[List[str]] = [
        "spark_run_version",
        "spark_run_open_lineage_version",
        "spark_run_start_time",
        "spark_run_end_time",
        "spark_run_open_lineage_state",
    ]

    @property
    def spark_run_version(self) -> Optional[str]:
        return None if self.attributes is None else self.attributes.spark_run_version

    @spark_run_version.setter
    def spark_run_version(self, spark_run_version: Optional[str]):
        if self.attributes is None:
            self.attributes = self.Attributes()
        self.attributes.spark_run_version = spark_run_version

    @property
    def spark_run_open_lineage_version(self) -> Optional[str]:
        return (
            None
            if self.attributes is None
            else self.attributes.spark_run_open_lineage_version
        )

    @spark_run_open_lineage_version.setter
    def spark_run_open_lineage_version(
        self, spark_run_open_lineage_version: Optional[str]
    ):
        if self.attributes is None:
            self.attributes = self.Attributes()
        self.attributes.spark_run_open_lineage_version = spark_run_open_lineage_version

    @property
    def spark_run_start_time(self) -> Optional[datetime]:
        return None if self.attributes is None else self.attributes.spark_run_start_time

    @spark_run_start_time.setter
    def spark_run_start_time(self, spark_run_start_time: Optional[datetime]):
        if self.attributes is None:
            self.attributes = self.Attributes()
        self.attributes.spark_run_start_time = spark_run_start_time

    @property
    def spark_run_end_time(self) -> Optional[datetime]:
        return None if self.attributes is None else self.attributes.spark_run_end_time

    @spark_run_end_time.setter
    def spark_run_end_time(self, spark_run_end_time: Optional[datetime]):
        if self.attributes is None:
            self.attributes = self.Attributes()
        self.attributes.spark_run_end_time = spark_run_end_time

    @property
    def spark_run_open_lineage_state(self) -> Optional[OpenLineageRunState]:
        return (
            None
            if self.attributes is None
            else self.attributes.spark_run_open_lineage_state
        )

    @spark_run_open_lineage_state.setter
    def spark_run_open_lineage_state(
        self, spark_run_open_lineage_state: Optional[OpenLineageRunState]
    ):
        if self.attributes is None:
            self.attributes = self.Attributes()
        self.attributes.spark_run_open_lineage_state = spark_run_open_lineage_state

    class Attributes(Catalog.Attributes):
        spark_run_version: Optional[str] = Field(default=None, description="")
        spark_run_open_lineage_version: Optional[str] = Field(
            default=None, description=""
        )
        spark_run_start_time: Optional[datetime] = Field(default=None, description="")
        spark_run_end_time: Optional[datetime] = Field(default=None, description="")
        spark_run_open_lineage_state: Optional[OpenLineageRunState] = Field(
            default=None, description=""
        )

    attributes: Spark.Attributes = Field(
        default_factory=lambda: Spark.Attributes(),
        description=(
            "Map of attributes in the instance and their values. "
            "The specific keys of this map will vary by type, "
            "so are described in the sub-types of this schema."
        ),
    )
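
# Usage sketch (illustrative, not part of the generated model): each convenience
# property above proxies reads and writes through the nested ``Attributes``
# instance, creating it lazily on the first write. The sketch assumes the
# module-level imports of this file (Field, validator, KeywordField, etc.), that
# ``Spark`` can be instantiated without arguments, and that ``OpenLineageRunState``
# comes from ``pyatlan.model.enums``.
#
#   spark_run = Spark()
#   spark_run.spark_run_version = "3.4.1"          # lazily builds self.attributes
#   spark_run.spark_run_open_lineage_state = OpenLineageRunState.COMPLETE
#   assert spark_run.attributes.spark_run_version == "3.4.1"
#   assert spark_run.spark_run_version == "3.4.1"  # property reads the same field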