PySparkJob.fromJson constructor
PySparkJob.fromJson(
  Map json_
)
Implementation
PySparkJob.fromJson(core.Map json_)
    : this(
        archiveUris: json_.containsKey('archiveUris')
            ? (json_['archiveUris'] as core.List)
                .map((value) => value as core.String)
                .toList()
            : null,
        args: json_.containsKey('args')
            ? (json_['args'] as core.List)
                .map((value) => value as core.String)
                .toList()
            : null,
        fileUris: json_.containsKey('fileUris')
            ? (json_['fileUris'] as core.List)
                .map((value) => value as core.String)
                .toList()
            : null,
        jarFileUris: json_.containsKey('jarFileUris')
            ? (json_['jarFileUris'] as core.List)
                .map((value) => value as core.String)
                .toList()
            : null,
        loggingConfig: json_.containsKey('loggingConfig')
            ? LoggingConfig.fromJson(
                json_['loggingConfig'] as core.Map<core.String, core.dynamic>)
            : null,
        mainPythonFileUri: json_.containsKey('mainPythonFileUri')
            ? json_['mainPythonFileUri'] as core.String
            : null,
        properties: json_.containsKey('properties')
            ? (json_['properties'] as core.Map<core.String, core.dynamic>).map(
                (key, value) => core.MapEntry(
                  key,
                  value as core.String,
                ),
              )
            : null,
        pythonFileUris: json_.containsKey('pythonFileUris')
            ? (json_['pythonFileUris'] as core.List)
                .map((value) => value as core.String)
                .toList()
            : null,
      );
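
Usage sketch (not part of the generated source): the constructor copies only the keys present in the decoded map and leaves the rest null. The example below assumes PySparkJob comes from the generated Dataproc client library (for example package:googleapis/dataproc/v1.dart); the URIs and property values are placeholders.

// Minimal sketch, assuming the generated Dataproc v1 client library.
import 'package:googleapis/dataproc/v1.dart';

void main() {
  // Keys mirror those read by the constructor above; absent keys stay null.
  final json = {
    'mainPythonFileUri': 'gs://my-bucket/jobs/word_count.py',
    'args': ['--input', 'gs://my-bucket/data/input.txt'],
    'pythonFileUris': ['gs://my-bucket/jobs/helpers.py'],
    'properties': {'spark.executor.memory': '2g'},
  };

  final job = PySparkJob.fromJson(json);
  print(job.mainPythonFileUri); // gs://my-bucket/jobs/word_count.py
  print(job.archiveUris);       // null (key not present in the map)
}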