Batch.fromJson constructor
Batch.fromJson(
  Map json_
)
Implementation
Batch.fromJson(core.Map json_)
    : this(
        createTime: json_['createTime'] as core.String?,
        creator: json_['creator'] as core.String?,
        environmentConfig: json_.containsKey('environmentConfig')
            ? EnvironmentConfig.fromJson(json_['environmentConfig']
                as core.Map<core.String, core.dynamic>)
            : null,
        labels:
            (json_['labels'] as core.Map<core.String, core.dynamic>?)?.map(
          (key, value) => core.MapEntry(
            key,
            value as core.String,
          ),
        ),
        name: json_['name'] as core.String?,
        operation: json_['operation'] as core.String?,
        pysparkBatch: json_.containsKey('pysparkBatch')
            ? PySparkBatch.fromJson(
                json_['pysparkBatch'] as core.Map<core.String, core.dynamic>)
            : null,
        runtimeConfig: json_.containsKey('runtimeConfig')
            ? RuntimeConfig.fromJson(
                json_['runtimeConfig'] as core.Map<core.String, core.dynamic>)
            : null,
        runtimeInfo: json_.containsKey('runtimeInfo')
            ? RuntimeInfo.fromJson(
                json_['runtimeInfo'] as core.Map<core.String, core.dynamic>)
            : null,
        sparkBatch: json_.containsKey('sparkBatch')
            ? SparkBatch.fromJson(
                json_['sparkBatch'] as core.Map<core.String, core.dynamic>)
            : null,
        sparkRBatch: json_.containsKey('sparkRBatch')
            ? SparkRBatch.fromJson(
                json_['sparkRBatch'] as core.Map<core.String, core.dynamic>)
            : null,
        sparkSqlBatch: json_.containsKey('sparkSqlBatch')
            ? SparkSqlBatch.fromJson(
                json_['sparkSqlBatch'] as core.Map<core.String, core.dynamic>)
            : null,
        state: json_['state'] as core.String?,
        stateHistory: (json_['stateHistory'] as core.List?)
            ?.map((value) => StateHistory.fromJson(
                value as core.Map<core.String, core.dynamic>))
            .toList(),
        stateMessage: json_['stateMessage'] as core.String?,
        stateTime: json_['stateTime'] as core.String?,
        uuid: json_['uuid'] as core.String?,
      );