# Source code for iottsbulk.models.job_status

# coding: utf-8

"""
    IoT Time Series Bulk API

    This API allows to bulk import IoT time series data based on files uploaded via IoT File Service. Data import for simulation assets (entities) is supported with up to nano second precision and for performance assets (entities) with up to milli second precision. A bulk import is modeled as asynchronous job whose status can be retrieved after creation. Successfully imported time series data can be retrieved using the read operation.   # noqa: E501
"""


import pprint
import re
import six
from mindsphere_core.exceptions import MindsphereClientError


class JobStatus(object):
    """Status of an asynchronous IoT Time Series bulk-import job.

    Attributes:
        attribute_types (dict): The key is attribute name and the value is
            attribute type.
        attribute_map (dict): The key is attribute name and the value is the
            json key in the API definition.
    """

    attribute_types = {
        'job_id': 'str',
        'status': 'str',
        'message': 'str',
        'job_start_time': 'datetime',
        'job_last_modified': 'datetime'
    }

    attribute_map = {
        'job_id': 'jobId',
        'status': 'status',
        'message': 'message',
        'job_start_time': 'jobStartTime',
        'job_last_modified': 'jobLastModified'
    }

    def __init__(self, job_id=None, status=None, message=None,
                 job_start_time=None, job_last_modified=None):
        # NOTE: fields are assigned directly to the private attributes, so
        # constructor arguments bypass the property setters' validation —
        # this matches the original generated-client behavior.
        self._job_id = job_id
        self._status = status
        self._message = message
        self._job_start_time = job_start_time
        self._job_last_modified = job_last_modified
        self.discriminator = None

    @property
    def job_id(self):
        """Gets the job_id of this JobStatus.

        Unique import job ID.

        :return: The job_id of this JobStatus.
        :rtype: str
        """
        return self._job_id

    @job_id.setter
    def job_id(self, job_id):
        """Sets the job_id of this JobStatus.

        :param job_id: The job_id of this JobStatus.
        :type: str
        """
        self._job_id = job_id

    @property
    def status(self):
        """Gets the status of this JobStatus.

        Status of bulk ingest job:
        - SUBMITTED: job has been created but import to time series store
          not yet started.
        - IN_PROGRESS: import of data has been started. Subsets of the data
          might already be available to be retrieved.
        - FAILED: time series import has failed. No data is available to be
          retrieved.
        - SUCCESS: time series data import has been successfully finished.

        :return: The status of this JobStatus.
        :rtype: str
        """
        return self._status

    @status.setter
    def status(self, status):
        """Sets the status of this JobStatus, validating the value.

        :param status: The status of this JobStatus (case-insensitive).
        :type: str
        :raises MindsphereClientError: if `status` is None or not one of the
            allowed values.
        """
        # BUGFIX: the documented failure state is FAILED, but the original
        # list only contained ERROR, so the documented value was rejected.
        # ERROR is retained for backward compatibility with older callers.
        allowed_values = ["SUBMITTED", "IN_PROGRESS", "FAILED", "ERROR", "SUCCESS"]
        # Guard against None explicitly; the original crashed with
        # AttributeError on None.lower().
        if status is None or status.lower() not in [x.lower() for x in allowed_values]:
            raise MindsphereClientError(
                "Invalid value for `status` ({0}), must be one of {1}"
                .format(status, allowed_values)
            )
        self._status = status

    @property
    def message(self):
        """Gets the message of this JobStatus.

        Contains an error message in case the job status equals FAILED
        (e.g. invalid file format, duplicate/overlapping time intervals,
        timestamps not in ISO format, invalid property value types).

        :return: The message of this JobStatus.
        :rtype: str
        """
        return self._message

    @message.setter
    def message(self, message):
        """Sets the message of this JobStatus.

        :param message: The message of this JobStatus.
        :type: str
        """
        self._message = message

    @property
    def job_start_time(self):
        """Gets the job_start_time of this JobStatus.

        Start time of data import in ISO date format.

        :return: The job_start_time of this JobStatus.
        :rtype: datetime
        """
        return self._job_start_time

    @job_start_time.setter
    def job_start_time(self, job_start_time):
        """Sets the job_start_time of this JobStatus.

        :param job_start_time: The job_start_time of this JobStatus.
        :type: datetime
        """
        self._job_start_time = job_start_time

    @property
    def job_last_modified(self):
        """Gets the job_last_modified of this JobStatus.

        Job last modified time in ISO date format. The backend updates this
        time whenever the message changes.

        :return: The job_last_modified of this JobStatus.
        :rtype: datetime
        """
        return self._job_last_modified

    @job_last_modified.setter
    def job_last_modified(self, job_last_modified):
        """Sets the job_last_modified of this JobStatus.

        :param job_last_modified: The job_last_modified of this JobStatus.
        :type: datetime
        """
        self._job_last_modified = job_last_modified

    def to_dict(self):
        """Returns the model properties as a dict.

        Nested model objects (anything exposing `to_dict`) are serialized
        recursively, including inside lists and dict values.
        """
        result = {}
        # Iterate the declared attributes; py2-only six.iteritems replaced
        # with plain iteration, and the dead `issubclass(JobStatus, dict)`
        # branch (always False) removed.
        for attr in self.attribute_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    key: val.to_dict() if hasattr(val, "to_dict") else val
                    for key, val in value.items()
                }
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Returns the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal."""
        if not isinstance(other, JobStatus):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal."""
        return not self == other