ago-clone-items/clone_items.py
"""
-------------------------------------------------------------------------------
|Copyright2017Esri
|
|LicensedundertheApacheLicense,Version2.0(the"License");
|youmaynotusethisfileexceptincompliancewiththeLicense.
|YoumayobtainacopyoftheLicenseat
|
|http://www.apache.org/licenses/LICENSE-2.0
|
|Unlessrequiredbyapplicablelaworagreedtoinwriting,software
|distributedundertheLicenseisdistributedonan"ASIS"BASIS,
|WITHOUTWARRANTIESORCONDITIONSOFANYKIND,eitherexpressorimplied.
|SeetheLicenseforthespecificlanguagegoverningpermissionsand
|limitationsundertheLicense.
------------------------------------------------------------------------------
"""
importjson,uuid,re,tempfile,os,copy,zipfile,shutil
fromfunctoolsimportreduce
fromurllib.parseimporturlparse
fromarcgisimportgis
fromarcgis.featuresimportFeatureLayerCollection
fromarcgis.featuresimportFeatureLayer
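
# The module-level flags below control how items are cloned, based on how they are
# used later in this file:
#   COPY_DATA - when True, features are copied into the cloned feature services and
#               feature collections; when False, only the schema is cloned.
#   USE_DEFAULT_BASEMAP - when True, cloned web maps use the target organization's
#                         default basemap instead of the source basemap.
#   ADD_GPS_METADATA_FIELDS - when True, GPS metadata fields are added to point layers
#                             and surfaced in layer pop-ups.
#   SEARCH_ORG_FOR_EXISTING_ITEMS - when True, the target organization is searched for
#                                   previously cloned items before new copies are created.
#   ITEM_EXTENT / SPATIAL_REFERENCE - optional overrides for the extent and spatial
#                                     reference applied to cloned items and services.
#   ADD_TAGS / REMOVE_TAGS - tags appended to every cloned item, and regular expressions
#                            for tags stripped from every cloned item.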
COPY_DATA = False
USE_DEFAULT_BASEMAP = False
ADD_GPS_METADATA_FIELDS = False
SEARCH_ORG_FOR_EXISTING_ITEMS = True
ITEM_EXTENT = None
SPATIAL_REFERENCE = None
ADD_TAGS = []
REMOVE_TAGS = []

# region Group and Item Definition Classes

class _GroupDefinition(object):
    """
    Represents the definition of a group within ArcGIS Online or Portal.
    """

    def __init__(self, info, thumbnail=None, portal_group=None):
        self.info = info
        self.thumbnail = thumbnail
        self.portal_group = portal_group

    def clone(self, target):
        """Clone the group in the target organization.

        Keyword arguments:
        target - The instance of arcgis.gis.GIS (the portal) to clone the group to."""
        try:
            new_group = None
            original_group = self.info
            title = original_group['title']
            tags = original_group['tags']
            for tag in list(tags):
                if tag.startswith("source-") or tag.startswith("sourcefolder-"):
                    tags.remove(tag)
            original_group['tags'].append("source-{0}".format(original_group['id']))
            tags = ','.join(original_group['tags'])

            # Find a unique name for the group
            i = 1
            while True:
                search_query = 'title:"{0}" AND owner:{1}'.format(title, target.users.me.username)
                groups = [group for group in target.groups.search(search_query, outside_org=False) if group['title'] == title]
                if len(groups) == 0:
                    break
                i += 1
                title = "{0} {1}".format(original_group['title'], i)

            thumbnail = self.thumbnail
            if not thumbnail and self.portal_group:
                temp_dir = os.path.join(_TEMP_DIR.name, original_group['id'])
                if not os.path.exists(temp_dir):
                    os.makedirs(temp_dir)
                thumbnail = self.portal_group.download_thumbnail(temp_dir)

            new_group = target.groups.create(title, tags, original_group['description'], original_group['snippet'],
                                             'private', thumbnail, True, original_group['sortField'], original_group['sortOrder'], True)
            return new_group
        except Exception as ex:
            raise _ItemCreateException("Failed to create group '{0}': {1}".format(original_group['title'], str(ex)), new_group)


class _ItemDefinition(object):
    """
    Represents the definition of an item within ArcGIS Online or Portal.
    """

    def __init__(self, info, data=None, sharing=None, thumbnail=None, portal_item=None):
        self.info = info
        self._data = data
        self.sharing = sharing
        if not self.sharing:
            self.sharing = {"access": "private", "groups": []}
        self.thumbnail = thumbnail
        self._item_property_names = ['title', 'type', 'description',
                                     'snippet', 'tags', 'culture',
                                     'accessInformation', 'licenseInfo', 'typeKeywords', 'extent']
        self.portal_item = portal_item

    @property
    def data(self):
        """Gets the data of the item"""
        return copy.deepcopy(self._data)

    def _get_item_properties(self):
        """Get a dictionary of item properties used in create and update operations."""
        item_properties = {}
        for property_name in self._item_property_names:
            item_properties[property_name] = self.info[property_name]

        type_keywords = item_properties['typeKeywords']
        for keyword in list(type_keywords):
            if keyword.startswith('source-'):
                type_keywords.remove(keyword)

        tags = item_properties['tags']
        tags.extend(ADD_TAGS)
        expressions = [re.compile(x) for x in REMOVE_TAGS]
        item_properties['tags'] = [t for t in tags if all(not ex.match(t) for ex in expressions)]
        if _TARGET_MUST_EXIST_TAG in item_properties['tags']:
            item_properties['tags'].remove(_TARGET_MUST_EXIST_TAG)
        if _MAINTAIN_SPATIAL_REF in item_properties['tags']:
            item_properties['tags'].remove(_MAINTAIN_SPATIAL_REF)
        if _COPY_ONLY_TAG in item_properties['tags']:
            item_properties['tags'].remove(_COPY_ONLY_TAG)

        type_keywords.append('source-{0}'.format(self.info['id']))
        item_properties['typeKeywords'] = ','.join(item_properties['typeKeywords'])
        item_properties['tags'] = ','.join(item_properties['tags'])

        extent = _deep_get(item_properties, 'extent')
        if ITEM_EXTENT is not None and extent is not None and len(extent) > 0:
            item_properties['extent'] = ITEM_EXTENT

        return item_properties

    def clone(self, target, folder, item_mapping):
        """Clone the item in the target organization.

        Keyword arguments:
        target - The instance of arcgis.gis.GIS (the portal) to clone the item to.
        folder - The folder to create the item in
        item_mapping - Dictionary containing mapping between new and old items.
        """
        try:
            new_item = None
            original_item = self.info

            # Get the item properties from the original item to be applied when the new item is created
            item_properties = self._get_item_properties()

            temp_dir = os.path.join(_TEMP_DIR.name, original_item['id'])
            if not os.path.exists(temp_dir):
                os.makedirs(temp_dir)

            data = self.data
            if not data and self.portal_item:
                data = self.portal_item.download(temp_dir)

            # The item's name will default to the name of the data, if it already exists in the folder we need to rename it to something unique
            name = os.path.basename(data)
            item = next((item for item in target.users.me.items(folder=_deep_get(folder, 'title')) if item['name'] == name), None)
            if item:
                new_name = "{0}_{1}{2}".format(os.path.splitext(name)[0], str(uuid.uuid4()).replace('-', ''), os.path.splitext(name)[1])
                new_path = os.path.join(temp_dir, new_name)
                os.rename(data, new_path)
                data = new_path

            thumbnail = self.thumbnail
            if not thumbnail and self.portal_item:
                thumbnail = self.portal_item.download_thumbnail(temp_dir)

            # Add the new item
            new_item = target.content.add(item_properties=item_properties, data=data, thumbnail=thumbnail, folder=_deep_get(folder, 'title'))
            return [new_item]
        except Exception as ex:
            raise _ItemCreateException("Failed to create {0} {1}: {2}".format(original_item['type'], original_item['title'], str(ex)), new_item)


class _TextItemDefinition(_ItemDefinition):
    """
    Represents the definition of a text based item within ArcGIS Online or Portal.
    """

    def clone(self, target, folder, item_mapping):
        """Clone the item in the target organization.

        Keyword arguments:
        target - The instance of arcgis.gis.GIS (the portal) to clone the item to.
        folder - The folder to create the item in
        item_mapping - Dictionary containing mapping between new and old items.
        """
        try:
            new_item = None
            original_item = self.info

            # Get the item properties from the original item to be applied when the new item is created
            item_properties = self._get_item_properties()

            data = self.data
            if data:
                item_properties['text'] = json.dumps(data)

            thumbnail = self.thumbnail
            if not thumbnail and self.portal_item:
                temp_dir = os.path.join(_TEMP_DIR.name, original_item['id'])
                if not os.path.exists(temp_dir):
                    os.makedirs(temp_dir)
                thumbnail = self.portal_item.download_thumbnail(temp_dir)

            new_item = target.content.add(item_properties=item_properties, thumbnail=thumbnail, folder=_deep_get(folder, 'title'))
            return [new_item]
        except Exception as ex:
            raise _ItemCreateException("Failed to create {0} {1}: {2}".format(original_item['type'], original_item['title'], str(ex)), new_item)


class _FeatureCollectionDefinition(_TextItemDefinition):
    """
    Represents the definition of a feature collection within ArcGIS Online or Portal.
    """

    def clone(self, target, folder, item_mapping):
        """Clone the item in the target organization.

        Keyword arguments:
        target - The instance of arcgis.gis.GIS (the portal) to clone the item to.
        folder - The folder to create the item in
        item_mapping - Dictionary containing mapping between new and old items.
        """
        try:
            new_item = None
            original_item = self.info

            # Get the item properties from the original item to be applied when the new item is created
            item_properties = self._get_item_properties()

            data = self.data
            if data:
                if not COPY_DATA:
                    if 'layers' in data and data['layers'] is not None:
                        for layer in data['layers']:
                            if 'featureSet' in layer and layer['featureSet'] is not None:
                                layer['featureSet']['features'] = []
                item_properties['text'] = json.dumps(data)

            thumbnail = self.thumbnail
            if not thumbnail and self.portal_item:
                temp_dir = os.path.join(_TEMP_DIR.name, original_item['id'])
                if not os.path.exists(temp_dir):
                    os.makedirs(temp_dir)
                thumbnail = self.portal_item.download_thumbnail(temp_dir)

            new_item = target.content.add(item_properties=item_properties, thumbnail=thumbnail, folder=_deep_get(folder, 'title'))
            return [new_item]
        except Exception as ex:
            raise _ItemCreateException("Failed to create {0} {1}: {2}".format(original_item['type'], original_item['title'], str(ex)), new_item)


class _FeatureServiceDefinition(_TextItemDefinition):
    """
    Represents the definition of a hosted feature service within ArcGIS Online or Portal.
    """

    def __init__(self, info, service_definition, layers_definition, is_view=False, view_sources={}, view_source_fields={}, features=None, data=None, sharing=None, thumbnail=None, portal_item=None):
        self._service_definition = service_definition
        self._layers_definition = layers_definition
        self._features = features
        self._is_view = is_view
        self._view_sources = view_sources
        self._view_source_fields = view_source_fields
        super().__init__(info, data, sharing, thumbnail, portal_item)

    @property
    def service_definition(self):
        """Gets the definition of the service"""
        return copy.deepcopy(self._service_definition)

    @property
    def layers_definition(self):
        """Gets the layer and table definitions of the service"""
        return copy.deepcopy(self._layers_definition)

    @property
    def is_view(self):
        """Gets if the service is a view"""
        return self._is_view

    @property
    def view_sources(self):
        """Gets the sources for the view"""
        return self._view_sources

    @property
    def view_source_fields(self):
        """Gets the original fields for the source view"""
        return self._view_source_fields

    @property
    def features(self):
        """Gets the features for the service"""
        return copy.deepcopy(self._features)

    def _get_features(self, feature_layer, spatial_reference=None):
        """Get the features for the given feature layer of a feature service. Returns a list of json features.

        Keyword arguments:
        feature_layer - The feature layer to return the features for
        spatial_reference - The spatial reference to return the features in"""
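        # When no spatial reference is supplied, default to Web Mercator Auxiliary
        # Sphere (wkid 3857), the spatial reference used by ArcGIS Online basemaps.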
        if spatial_reference is None:
            spatial_reference = {'wkid': 3857}

        total_features = []
        record_count = feature_layer.query(return_count_only=True)
        max_record_count = feature_layer.properties['maxRecordCount']
        if max_record_count < 1:
            max_record_count = 1000
        offset = 0
        return_z = 'hasZ' in feature_layer.properties and feature_layer.properties['hasZ']
        return_m = 'hasM' in feature_layer.properties and feature_layer.properties['hasM']
        while offset < record_count:
            features = feature_layer.query(result_offset=offset, result_record_count=max_record_count, out_sr=spatial_reference, return_z=return_z, return_m=return_m).features
            offset += len(features)
            total_features += [feature.as_dict for feature in features]
        return total_features

    def _add_features(self, layers, relationships, layer_field_mapping, spatial_reference):
        ...
        if len(attachment_infos) > 0:
            temp_dir = os.path.join(_TEMP_DIR.name, 'attachments')
            if not os.path.exists(temp_dir):
                os.makedirs(temp_dir)
            for attachment_info in attachment_infos:
                attachment_file = original_attachments.download(original_oid, attachment_info['id'], temp_dir)
                attachments.add(oid, attachment_file)

    def _get_unique_name(self, target, name, item_mapping, force_add_guid_suffix):
        """Create a new unique name for the service.

        Keyword arguments:
        target - The instance of arcgis.gis.GIS (the portal) to clone the feature service to.
        name - The original name.
        item_mapping - Dictionary containing mapping between new and old items.
        force_add_guid_suffix - Indicates if a guid suffix should automatically be added to the end of the service name
        """
        if name[0].isdigit():
            name = '_' + name
        name = name.replace(' ', '_')

        if not force_add_guid_suffix:
            guids = re.findall('[0-9A-F]{32}', name, re.IGNORECASE)
            for guid in guids:
                if guid in item_mapping['GroupIDs']:
                    name = name.replace(guid, item_mapping['GroupIDs'][guid])
                elif guid in item_mapping['ItemIDs']:
                    name = name.replace(guid, item_mapping['ItemIDs'][guid])
                else:
                    new_guid = str(uuid.uuid4()).replace('-', '')
                    name = name.replace(guid, new_guid)

            while True:
                if target.content.is_service_name_available(name, 'featureService'):
                    break
                guid = str(uuid.uuid4()).replace('-', '')
                ends_with_guid = re.findall('_[0-9A-F]{32}$', name, re.IGNORECASE)
                if len(ends_with_guid) > 0:
                    name = name[:len(name) - 32] + guid
                else:
                    name = "{0}_{1}".format(name, guid)
        else:
            guid = str(uuid.uuid4()).replace('-', '')
            ends_with_guid = re.findall('_[0-9A-F]{32}$', name, re.IGNORECASE)
            if len(ends_with_guid) > 0:
                name = name[:len(name) - 32] + guid
            else:
                name = "{0}_{1}".format(name, guid)

        return name

    def clone(self, target, folder, item_mapping):
        """Clone the feature service in the target organization.

        Keyword arguments:
        target - The instance of arcgis.gis.GIS (the portal) to clone the feature service to.
        folder - The name of the folder to create the item in
        item_mapping - Dictionary containing mapping between new and old items.
        """
        try:
            new_item = None
            original_item = self.info

            # Get the definition of the original feature service
            service_definition = self.service_definition

            # Modify the definition before passing to create the new service
            name = original_item['name']
            if name is None:
                name = os.path.basename(os.path.dirname(original_item['url']))
            name = self._get_unique_name(target, name, item_mapping, False)
            service_definition['name'] = name

            for key in ['layers', 'tables', 'fullExtent', 'hasViews']:
                if key in service_definition:
                    del service_definition[key]

            # Set the extent and spatial reference of the service
            original_extent = service_definition['initialExtent']
            spatial_reference = None
            if _MAINTAIN_SPATIAL_REF not in original_item['tags']:
                spatial_reference = SPATIAL_REFERENCE
            new_extent = _get_extent_definition(original_extent, ITEM_EXTENT, spatial_reference)
            service_definition['initialExtent'] = new_extent
            service_definition['spatialReference'] = new_extent['spatialReference']

            if self.is_view:
                properties = ['name', 'isView', 'sourceSchemaChangesAllowed', 'isUpdatableView', 'capabilities']
                service_definition_copy = copy.deepcopy(service_definition)
                for key, value in service_definition_copy.items():
                    if key not in properties:
                        del service_definition[key]

            # Remove any unsupported capabilities from layer for Portal
            supported_capabilities = ['Create', 'Query', 'Editing', 'Update', 'Delete', 'Uploads', 'Sync', 'Extract']
            if target.properties.isPortal:
                capabilities = _deep_get(service_definition, 'capabilities')
                if capabilities is not None:
                    service_definition['capabilities'] = ','.join([x for x in capabilities.split(',') if x in supported_capabilities])

            # Create a new feature service
            # In some cases isServiceNameAvailable returns true but fails to create the service with error that a service with the name already exists.
            # In these cases catch the error and try again with a unique name.
            try:
                new_item = _create_service(target, 'featureService', service_definition, self.is_view, _deep_get(folder, 'title'))
            except RuntimeError as ex:
                if "already exists" in str(ex):
                    name = self._get_unique_name(target, name, item_mapping, True)
                    service_definition['name'] = name
                    new_item = target.content.create_service(name, service_type='featureService', create_params=service_definition, folder=_deep_get(folder, 'title'))
                elif "managed database" in str(ex):
                    raise Exception("The target portal's managed database must be an ArcGIS Data Store.")
                else:
                    raise

            # Check if tool has been canceled, raise exception with new_item so it can be cleaned up
            _check_cancel_status(new_item)
            # Get the layer and table definitions from the original service and prepare them for the new service
            layers_definition = self.layers_definition
            gps_metadata = json.loads(_GPS_METADATA_FIELDS)
            relationships = {}
            for layer in layers_definition['layers'] + layers_definition['tables']:
                # Need to remove relationships first and add them back individually
                # after all layers and tables have been added to the definition
                if 'relationships' in layer and layer['relationships'] is not None and len(layer['relationships']) != 0:
                    relationships[layer['id']] = layer['relationships']
                    layer['relationships'] = []

                # Need to remove all indexes duplicated for fields.
                # Services get into this state due to a bug in 10.4 and 1.2
                field_names = [f['name'].lower() for f in layer['fields']]
                unique_fields = []
                if 'indexes' in layer:
                    for index in list(layer['indexes']):
                        fields = index['fields'].lower()
                        if fields in unique_fields or fields not in field_names:
                            layer['indexes'].remove(index)
                        else:
                            unique_fields.append(fields)

                # Due to a bug at 10.5.1 any domains for a double field must explicitly have a float code rather than int
                for field in layer['fields']:
                    field_type = _deep_get(field, 'type')
                    if field_type == "esriFieldTypeDouble":
                        coded_values = _deep_get(field, 'domain', 'codedValues')
                        if coded_values is not None:
                            for coded_value in coded_values:
                                code = _deep_get(coded_value, 'code')
                                if code is not None:
                                    coded_value['code'] = float(code)

                # Add GPS Metadata fields to layer definition
                if ADD_GPS_METADATA_FIELDS:
                    if 'geometryType' in layer and layer['geometryType'] == 'esriGeometryPoint':
                        metadata_fields = gps_metadata['fields']
                        for metadata_field in metadata_fields:
                            if metadata_field['name'].lower() not in field_names:
                                if target.properties.isPortal:
                                    metadata_field['name'] = metadata_field['name'].lower()
                                layer['fields'].append(metadata_field)

                # Set the extent of the feature layer to the specified default extent
                if layer['type'] == 'Feature Layer':
                    layer['extent'] = new_extent

                # Remove hasViews property if exists
                if 'hasViews' in layer:
                    del layer['hasViews']

                # Update the view layer source properties
                if self.is_view:
                    url = self.view_sources[layer['id']]
                    original_feature_service = os.path.dirname(url)
                    original_id = os.path.basename(url)
                    admin_layer_info = {}
                    layer['adminLayerInfo'] = admin_layer_info
                    for key, value in item_mapping['FeatureServices'].items():
                        if _compare_url(key, original_feature_service):
                            new_service = value
                            view_layer_definition = {}
                            view_layer_definition['sourceServiceName'] = os.path.basename(os.path.dirname(new_service['url']))
                            view_layer_definition['sourceLayerId'] = new_service['layer_id_mapping'][int(original_id)]
                            view_layer_definition['sourceLayerFields'] = '*'
                            admin_layer_info['viewLayerDefinition'] = view_layer_definition
                            break

                # Remove any unsupported capabilities from layer for Portal
                if target.properties.isPortal:
                    capabilities = _deep_get(layer, 'capabilities')
                    if capabilities is not None:
                        layer['capabilities'] = ','.join([x for x in capabilities.split(',') if x in supported_capabilities])

            # Add the layer and table definitions to the service
            # Explicitly add layers first and then tables, otherwise sometimes json.dumps() reverses them and this affects the output service
            feature_service = FeatureLayerCollection.fromitem(new_item)
            feature_service_admin = feature_service.manager
            layers = []
            tables = []
            if len(layers_definition['layers']) > 0:
                layers = copy.deepcopy(layers_definition['layers'])
                if self.is_view:
                    for layer in layers:
                        del layer['fields']
            if len(layers_definition['tables']) > 0:
                tables = copy.deepcopy(layers_definition['tables'])
                if self.is_view:
                    for table in tables:
                        del table['fields']
            definition = '{{"layers":{0},"tables":{1}}}'.format(json.dumps(layers), json.dumps(tables))
            _add_to_definition(feature_service_admin, definition)

            # Check if tool has been canceled, raise exception with new_item so it can be cleaned up
            _check_cancel_status(new_item)
            # Create a lookup between the new and old layer ids
            layer_id_mapping = {}
            layer_fields = {}
            original_layers = layers_definition['layers'] + layers_definition['tables']
            i = 0
            for layer in feature_service.layers + feature_service.tables:
                layer_id_mapping[original_layers[i]['id']] = layer.properties['id']
                layer_fields[layer.properties['id']] = layer.properties.fields
                i += 1

            # Create a lookup for the layers and tables using their id
            new_layers = {}
            for layer in feature_service.layers + feature_service.tables:
                for key, value in layer_id_mapping.items():
                    if value == layer.properties['id']:
                        new_layers[key] = layer
                        break

            # Create a field mapping object if the case or name of the field has changed
            layer_field_mapping = {}
            for layer in layers_definition['layers'] + layers_definition['tables']:
                layer_id = layer['id']
                fields = layer['fields']
                if self.is_view:
                    fields = self.view_source_fields[layer_id]
                new_layer = new_layers[layer_id]
                field_mapping = {}
                if len(fields) <= len(new_layer.properties['fields']):
                    for i in range(0, len(fields)):
                        if fields[i]['name'] != new_layer.properties['fields'][i]['name']:
                            field_mapping[fields[i]['name']] = new_layer.properties['fields'][i]['name']
                if len(field_mapping) > 0:
                    layer_field_mapping[layer_id] = field_mapping

                # If editor tracking fields changed from the original layer we need to update the layer field mapping
                del_fields = []
                if 'editFieldsInfo' in layer and layer['editFieldsInfo'] is not None and 'editFieldsInfo' in new_layer.properties and new_layer.properties['editFieldsInfo'] is not None:
                    new_edit_fields_info = new_layer.properties['editFieldsInfo']
                    for key, old_field in layer['editFieldsInfo'].items():
                        if key in new_edit_fields_info:
                            new_field = new_edit_fields_info[key]
                            if old_field != new_field:
                                new_delete_field = old_field
                                if old_field in field_mapping:
                                    new_delete_field = field_mapping[old_field]
                                for field in new_layer.properties['fields']:
                                    if field['name'] == new_delete_field and self.is_view == False:
                                        del_fields.append(new_delete_field)
                                        break
                                if layer_id in layer_field_mapping:
                                    layer_field_mapping[layer_id][old_field] = new_field
                                else:
                                    field_mapping = {old_field: new_field}
                                    layer_field_mapping[layer_id] = field_mapping

                update_definition = {}
                delete_definition = {}
                if len(del_fields) > 0 or layer_id in layer_field_mapping:
                    # Delete the old editor tracking fields from the layer
                    if len(del_fields) > 0:
                        layer_admin = new_layer.manager
                        delete_definition_fields = []
                        for field in del_fields:
                            delete_definition_fields.append({'name': field})
                        delete_definition['fields'] = delete_definition_fields

                    # Update editing templates if field mapping is required
                    if layer_id in layer_field_mapping:
                        field_mapping = layer_field_mapping[layer_id]
                        if 'templates' in new_layer.properties and new_layer.properties['templates'] is not None:
                            templates = new_layer.properties['templates']
                            for template in templates:
                                if 'prototype' in template and template['prototype'] is not None:
                                    _update_feature_attributes(template['prototype'], field_mapping)
                            update_definition['templates'] = templates
                        if 'types' in new_layer.properties and new_layer.properties['types'] is not None:
                            types = new_layer.properties['types']
                            for layer_type in types:
                                if 'templates' in layer_type and layer_type['templates'] is not None:
                                    for template in layer_type['templates']:
                                        if 'prototype' in template and template['prototype'] is not None:
                                            _update_feature_attributes(template['prototype'], field_mapping)
                            update_definition['types'] = types

                # Update field visibility for views
                if self.is_view:
                    if 'viewDefinitionQuery' in layer and layer['viewDefinitionQuery']:
                        update_definition['viewDefinitionQuery'] = layer['viewDefinitionQuery']
                        if layer_id in layer_field_mapping:
                            update_definition['viewDefinitionQuery'] = _find_and_replace_fields(update_definition['viewDefinitionQuery'], layer_field_mapping[layer_id])

                    field_visibility = []
                    need_update = False
                    view_field_names = [f['name'].lower() for f in layer['fields']]
                    for source_field in self.view_source_fields[layer_id]:
                        source_field_name = source_field['name']
                        visible = source_field_name.lower() in view_field_names
                        if not visible:
                            need_update = True
                        field_name = source_field_name
                        if layer_id in layer_field_mapping:
                            if source_field_name in layer_field_mapping[layer_id]:
                                field_name = layer_field_mapping[layer_id][source_field_name]
                        field_visibility.append({'name': field_name, 'visible': visible})

                    if need_update:
                        update_definition['fields'] = field_visibility
                # Update the definition of the layer
                if len(update_definition) > 0 or len(delete_definition) > 0:
                    layer_admin = new_layer.manager
                    if len(update_definition) > 0:
                        layer_admin.update_definition(update_definition)
                    if len(delete_definition) > 0:
                        layer_admin.delete_from_definition(delete_definition)

            # Check if tool has been canceled, raise exception with new_item so it can be cleaned up
            _check_cancel_status(new_item)

            # Add the relationships back to the layers
            relationship_field_mapping = {}
            if len(relationships) > 0 and self.is_view == False:
                for layer_id in relationships:
                    for relationship in relationships[layer_id]:
                        if layer_id in layer_field_mapping:
                            field_mapping = layer_field_mapping[layer_id]
                            if relationship['keyField'] in field_mapping:
                                relationship['keyField'] = field_mapping[relationship['keyField']]
                        related_table_id = relationship['relatedTableId']
                        if related_table_id in layer_field_mapping:
                            field_mapping = layer_field_mapping[related_table_id]
                            if layer_id not in relationship_field_mapping:
                                relationship_field_mapping[layer_id] = {}
                            relationship_field_mapping[layer_id][relationship['id']] = field_mapping

                relationships_copy = copy.deepcopy(relationships)
                for layer_id in relationships_copy:
                    for relationship in relationships_copy[layer_id]:
                        relationship['relatedTableId'] = layer_id_mapping[relationship['relatedTableId']]

                if target.properties.isPortal:
                    relationships_definition = {'layers': []}
                    for key, value in layer_id_mapping.items():
                        if key in relationships_copy:
                            relationships_definition['layers'].append({'id': value, 'relationships': relationships_copy[key]})
                        else:
                            relationships_definition['layers'].append({'id': value, 'relationships': []})
                    feature_service_admin.add_to_definition(relationships_definition)
                else:
                    for layer_id in relationships_copy:
                        layer = new_layers[layer_id]
                        layer.manager.add_to_definition({'relationships': relationships_copy[layer_id]})

            # Check if tool has been canceled, raise exception with new_item so it can be cleaned up
            _check_cancel_status(new_item)

            # Get the item properties from the original item
            item_properties = self._get_item_properties()

            # Merge type keywords from what is created by default for the new item and what was in the original item
            type_keywords = list(new_item['typeKeywords'])
            type_keywords.extend(item_properties['typeKeywords'].split(','))
            type_keywords = list(set(type_keywords))

            # Replace type keyword if it references an item id of a cloned item, ex. Survey123
            for keyword in list(type_keywords):
                if keyword in item_mapping['ItemIDs']:
                    type_keywords.remove(keyword)
                    type_keywords.append(item_mapping['ItemIDs'][keyword])
            item_properties['typeKeywords'] = ','.join(type_keywords)

            # Get the collection of layers and tables from the item data
            data = self.data
            layers = []
            if data and 'layers' in data and data['layers'] is not None:
                layers += [layer for layer in data['layers']]
            if data and 'tables' in data and data['tables'] is not None:
                layers += [layer for layer in data['tables']]

            # Update any pop-up, labeling or renderer field references
            for layer_id in layer_field_mapping:
                layer = next((layer for layer in layers if layer['id'] == layer_id), None)
                if layer:
                    _update_layer_fields(layer, layer_field_mapping[layer_id])
            for layer_id in relationship_field_mapping:
                layer = next((layer for layer in layers if layer['id'] == layer_id), None)
                if layer:
                    _update_layer_related_fields(layer, relationship_field_mapping[layer_id])

            # Update the layer id
            for layer in layers:
                layer['id'] = layer_id_mapping[layer['id']]

            # Add GPS Metadata field infos to the pop-up of the layer
            if ADD_GPS_METADATA_FIELDS:
                gps_metadata_field_infos = gps_metadata['popup']
                for layer in layers:
                    field_infos = _deep_get(layer, 'popupInfo', 'fieldInfos')
                    if field_infos is not None:
                        fields = layer_fields[layer['id']]
                        field_names = [f['name'].lower() for f in fields]
                        field_info_names = [_deep_get(f, 'fieldName').lower() for f in field_infos]
                        for gps_metadata_field_info in gps_metadata_field_infos:
                            gps_field_name = gps_metadata_field_info['fieldName'].lower()
                            if gps_field_name in field_names and gps_field_name not in field_info_names:
                                i = field_names.index(gps_field_name)
                                gps_metadata_field_info['fieldName'] = fields[i]['name']
                                field_infos.append(gps_metadata_field_info)

            # Set the data to the text properties of the item
            if data:
                item_properties['text'] = json.dumps(data)

            # If the item title has a guid, check if it is in the item_mapping and replace it if it is.
            guids = re.findall('[0-9A-F]{32}', item_properties['title'], re.IGNORECASE)
            for guid in guids:
                if guid in item_mapping['GroupIDs']:
                    item_properties['title'] = item_properties['title'].replace(guid, item_mapping['GroupIDs'][guid])
                elif guid in item_mapping['ItemIDs']:
                    item_properties['title'] = item_properties['title'].replace(guid, item_mapping['ItemIDs'][guid])

            # Update the item definition of the service
            thumbnail = self.thumbnail
            if not thumbnail and self.portal_item:
                temp_dir = os.path.join(_TEMP_DIR.name, original_item['id'])
                if not os.path.exists(temp_dir):
                    os.makedirs(temp_dir)
                thumbnail = self.portal_item.download_thumbnail(temp_dir)
            new_item.update(item_properties=item_properties, thumbnail=thumbnail)

            # Check if tool has been canceled, raise exception with new_item so it can be cleaned up
            _check_cancel_status(new_item)

            # Copy features from original item
            if COPY_DATA and not self.is_view:
                self._add_features(new_layers, relationships, layer_field_mapping, feature_service.properties['spatialReference'])
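
            # Return the new item along with the lookups that dependent items (web maps,
            # apps, views) use to re-point their references: the field name mapping, layer
            # id mapping, fields of each new layer, and related-record field mapping.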
            return [new_item, layer_field_mapping, layer_id_mapping, layer_fields, relationship_field_mapping]
        except _CustomCancelException as ex:
            raise ex
        except Exception as ex:
            raise _ItemCreateException("Failed to create {0} {1}: {2}".format(original_item['type'], original_item['title'], str(ex)), new_item)

class _WebMapDefinition(_TextItemDefinition):
    """
    Represents the definition of a web map within ArcGIS Online or Portal.
    """

    def clone(self, target, folder, item_mapping):
        """Clone the web map in the target organization.

        Keyword arguments:
        target - The instance of arcgis.gis.GIS (the portal) to clone the web map to
        folder - The folder to create the item in
        item_mapping - Dictionary containing mapping between new and old items.
        """
        try:
            new_item = None
            original_item = self.info

            # Get the item properties from the original web map which will be applied when the new item is created
            item_properties = self._get_item_properties()

            # Swizzle the item ids and URLs of the feature layers and tables in the web map
            webmap_json = self.data
            layers = []
            feature_collections = []
            if 'operationalLayers' in webmap_json:
                layers += [layer for layer in webmap_json['operationalLayers'] if 'layerType' in layer and layer['layerType'] == "ArcGISFeatureLayer" and 'url' in layer]
                feature_collections += [layer for layer in webmap_json['operationalLayers'] if 'layerType' in layer and layer['layerType'] == "ArcGISFeatureLayer" and 'type' in layer and layer['type'] == "Feature Collection"]
            if 'tables' in webmap_json:
                layers += [table for table in webmap_json['tables'] if 'url' in table]

            gps_metadata_field_infos = json.loads(_GPS_METADATA_FIELDS)['popup']
            for layer in layers:
                feature_service_url = os.path.dirname(layer['url'])
                for original_url in item_mapping['FeatureServices']:
                    if _compare_url(feature_service_url, original_url):
                        new_service = item_mapping['FeatureServices'][original_url]
                        layer_id = int(os.path.basename(layer['url']))
                        new_id = new_service['layer_id_mapping'][layer_id]
                        layer['url'] = "{0}/{1}".format(new_service['url'], new_id)
                        layer['itemId'] = new_service['id']
                        if layer_id in new_service['layer_field_mapping']:
                            _update_layer_fields(layer, new_service['layer_field_mapping'][layer_id])
                        if layer_id in new_service['relationship_field_mapping']:
                            _update_layer_related_fields(layer, new_service['relationship_field_mapping'][layer_id])

                        # If the layer contains gps metadata fields that are not in the popup definition, add them
                        if ADD_GPS_METADATA_FIELDS and new_id in new_service['layer_fields']:
                            fields = new_service['layer_fields'][new_id]
                            field_names = [f['name'].lower() for f in fields]
                            field_infos = _deep_get(layer, 'popupInfo', 'fieldInfos')
                            if field_infos is not None:
                                field_info_names = [_deep_get(f, 'fieldName').lower() for f in field_infos]
                                for gps_metadata_field_info in gps_metadata_field_infos:
                                    gps_field_name = gps_metadata_field_info['fieldName'].lower()
                                    if gps_field_name in field_names and gps_field_name not in field_info_names:
                                        i = field_names.index(gps_field_name)
                                        gps_metadata_field_info['fieldName'] = fields[i]['name']
                                        field_infos.append(gps_metadata_field_info)
                        break

            for feature_collection in feature_collections:
                if 'itemId' in feature_collection and feature_collection['itemId'] is not None and feature_collection['itemId'] in item_mapping['ItemIDs']:
                    feature_collection['itemId'] = item_mapping['ItemIDs'][feature_collection['itemId']]

            # Change the basemap to the default basemap defined in the target organization
            if USE_DEFAULT_BASEMAP:
                properties = target.properties
                if 'defaultBasemap' in properties and properties['defaultBasemap'] is not None:
                    default_basemap = properties['defaultBasemap']
                    if 'title' in default_basemap and 'baseMapLayers' in default_basemap and default_basemap['baseMapLayers'] is not None:
                        for key in [k for k in default_basemap]:
                            if key not in ['title', 'baseMapLayers']:
                                del default_basemap[key]
                        for basemap_layer in default_basemap['baseMapLayers']:
                            if 'resourceInfo' in basemap_layer:
                                del basemap_layer['resourceInfo']
                        webmap_json['baseMap'] = default_basemap

            # Add the web map to the target portal
            item_properties['text'] = json.dumps(webmap_json)
            thumbnail = self.thumbnail
            if not thumbnail and self.portal_item:
                temp_dir = os.path.join(_TEMP_DIR.name, original_item['id'])
                if not os.path.exists(temp_dir):
                    os.makedirs(temp_dir)
                thumbnail = self.portal_item.download_thumbnail(temp_dir)
            new_item = target.content.add(item_properties=item_properties, thumbnail=thumbnail, folder=_deep_get(folder, 'title'))
            return [new_item]
        except Exception as ex:
            raise _ItemCreateException("Failed to create {0} {1}: {2}".format(original_item['type'], original_item['title'], str(ex)), new_item)

class _ApplicationDefinition(_TextItemDefinition):
    """
    Represents the definition of an application within ArcGIS Online or Portal.
    """

    def __init__(self, info, source_app_title=None, update_url=True, data=None, sharing=None, thumbnail=None, portal_item=None):
        self._source_app_title = source_app_title
        self._update_url = update_url
        super().__init__(info, data, sharing, thumbnail, portal_item)

    @property
    def source_app_title(self):
        """Gets the title of the application"""
        return self._source_app_title

    @property
    def update_url(self):
        """Gets a value indicating if the application url should be updated"""
        return self._update_url

    def clone(self, target, folder, item_mapping):
        """Clone the application in the target organization.

        Keyword arguments:
        target - The instance of arcgis.gis.GIS (the portal) to clone the application to
        folder - The folder to create the item in
        item_mapping - Dictionary containing mapping between new and old items.
        """
        try:
            new_item = None
            original_item = self.info
            org_url = _get_org_url(target)
            is_web_appbuilder = False

            # Get the item properties from the original application which will be applied when the new item is created
            item_properties = self._get_item_properties()

            # Swizzle the item ids of the web maps, groups and URLs defined in the application's data
            app_json = self.data
            if app_json is not None:
                app_json_text = ''

                # If the item is a story map don't swizzle any of the json references
                if 'Story Map' in original_item['typeKeywords'] or 'Story Maps' in original_item['typeKeywords']:
                    app_json_text = json.dumps(app_json)

                else:
                    if "Web AppBuilder" in original_item['typeKeywords']:  # Web AppBuilder
                        is_web_appbuilder = True
                        if 'portalUrl' in app_json:
                            app_json['portalUrl'] = org_url
                        if 'map' in app_json:
                            if 'portalUrl' in app_json['map']:
                                app_json['map']['portalUrl'] = org_url
                            if 'itemId' in app_json['map']:
                                app_json['map']['itemId'] = item_mapping['ItemIDs'][app_json['map']['itemId']]
                            if 'mapOptions' in app_json['map'] and app_json['map']['mapOptions'] is not None:
                                if 'extent' in app_json['map']['mapOptions']:
                                    del app_json['map']['mapOptions']['extent']
                        if 'httpProxy' in app_json:
                            if 'url' in app_json['httpProxy']:
                                app_json['httpProxy']['url'] = org_url + "sharing/proxy"
                        if 'geometryService' in app_json and 'geometry' in target.properties['helperServices']:
                            app_json['geometryService'] = target.properties['helperServices']['geometry']['url']

                    elif original_item['type'] in ["Operation View", "Dashboard"]:  # Operations Dashboard
                        if 'widgets' in app_json:
                            for widget in app_json['widgets']:
                                if widget['type'] == 'mapWidget':
                                    if 'itemId' in widget:
                                        widget['itemId'] = item_mapping['ItemIDs'][widget['itemId']]
                                    elif 'mapId' in widget:
                                        widget['mapId'] = item_mapping['ItemIDs'][widget['mapId']]

                    else:  # Configurable Application Template
                        if 'folderId' in app_json:
                            app_json['folderId'] = _deep_get(folder, 'id')
                        if 'values' in app_json:
                            if 'group' in app_json['values']:
                                app_json['values']['group'] = item_mapping['GroupIDs'][app_json['values']['group']]
                            if 'webmap' in app_json['values']:
                                if isinstance(app_json['values']['webmap'], list):
                                    new_webmap_ids = []
                                    for webmap_id in app_json['values']['webmap']:
                                        new_webmap_ids.append(item_mapping['ItemIDs'][webmap_id])
                                    app_json['values']['webmap'] = new_webmap_ids
                                else:
                                    app_json['values']['webmap'] = item_mapping['ItemIDs'][app_json['values']['webmap']]
                        if self.source_app_title is not None:
                            search_query = 'title:"{0}" AND owner:{1} AND type:Web Mapping Application'.format(self.source_app_title, "esri_en")
                            search_items = target.content.search(search_query, max_items=100, outside_org=True)
                            if len(search_items) > 0:
                                existing_item = max(search_items, key=lambda x: x['created'])
                                app_json['source'] = existing_item['id']

                    app_json_text = json.dumps(app_json)
                    for original_url in item_mapping['FeatureServices']:
                        service = item_mapping['FeatureServices'][original_url]
                        for key, value in service['layer_id_mapping'].items():
                            app_json_text = re.sub("{0}/{1}".format(original_url, key),
                                                   "{0}/{1}".format(service['url'], value),
                                                   app_json_text, 0, re.IGNORECASE)
                        app_json_text = re.sub(original_url, service['url'], app_json_text, 0, re.IGNORECASE)
                    for original_id in item_mapping['ItemIDs']:
                        app_json_text = re.sub(original_id, item_mapping['ItemIDs'][original_id], app_json_text, 0, re.IGNORECASE)

                    # Replace any references to the default print service
                    new_print_url = _deep_get(target.properties, 'helperServices', 'printTask', 'url')
                    if new_print_url is not None:
                        old_print_url = 'https://utility.arcgisonline.com/arcgis/rest/services/Utilities/PrintingTools/GPServer/Export%20Web%20Map%20Task'
                        if self.portal_item is not None and _deep_get(self.portal_item._gis.properties, 'helperServices', 'printTask', 'url') is not None:
                            old_print_url = _deep_get(self.portal_item._gis.properties, 'helperServices', 'printTask', 'url')

                        app_json_text = re.sub(old_print_url, new_print_url, app_json_text, 0, re.IGNORECASE)
                        if old_print_url.startswith('https://'):
                            app_json_text = re.sub('http://' + old_print_url[8:], new_print_url, app_json_text, 0, re.IGNORECASE)
                        elif old_print_url.startswith('http://'):
                            app_json_text = re.sub('https://' + old_print_url[7:], new_print_url, app_json_text, 0, re.IGNORECASE)

                    # Perform a general find and replace of field names if field mapping is required
                    for service in item_mapping['FeatureServices']:
                        for layer_id in item_mapping['FeatureServices'][service]['layer_field_mapping']:
                            field_mapping = item_mapping['FeatureServices'][service]['layer_field_mapping'][layer_id]
                            app_json_text = _find_and_replace_fields(app_json_text, field_mapping)

                    # Replace any references to the original org url with the target org url. Used to re-point item resource references
                    if original_item['url'] is not None:
                        url = original_item['url']
                        find_string = "/apps/"
                        index = url.find(find_string)
                        if index != -1:
                            source_org_url = url[:index + 1]
                            app_json_text = re.sub(source_org_url, org_url, app_json_text, 0, re.IGNORECASE)

                item_properties['text'] = app_json_text

            # Add the application to the target portal
            thumbnail = self.thumbnail
            if not thumbnail and self.portal_item:
                temp_dir = os.path.join(_TEMP_DIR.name, original_item['id'])
                if not os.path.exists(temp_dir):
                    os.makedirs(temp_dir)
                thumbnail = self.portal_item.download_thumbnail(temp_dir)
            new_item = target.content.add(item_properties=item_properties, thumbnail=thumbnail, folder=_deep_get(folder, 'title'))

            # Add the resources to the new item
            if self.portal_item:
                resources = self.portal_item.resources
                resource_list = resources.list()
                if len(resource_list) > 0:
                    resources_dir = os.path.join(_TEMP_DIR.name, original_item['id'], 'resources')
                    if not os.path.exists(resources_dir):
                        os.makedirs(resources_dir)
                    for resource in resource_list:
                        resource_path = resources.get(resource['resource'], False, resources_dir)
                        folder_name = None
                        resource_name = resource['resource']
                        if len(resource_name.split('/')) == 2:
                            folder_name, resource_name = resource_name.split('/')
                        new_item.resources.add(resource_path, folder_name, resource_name)

            # Update the url of the item to point to the new portal and the new id of the application if required
            if original_item['url'] is not None:
                url = original_item['url']
                if self.update_url:
                    find_string = "/apps/"
                    index = original_item['url'].find(find_string)
                    url = '{0}{1}'.format(org_url.rstrip('/'), original_item['url'][index:])
                    find_string = "id="
                    index = url.find(find_string)
                    url = '{0}{1}'.format(url[:index + len(find_string)], new_item.id)
                item_properties = {'url': url}
                new_item.update(item_properties)

            # Add a code attachment if the application is Web AppBuilder so that it can be downloaded
            if is_web_appbuilder:
                url = '{0}sharing/rest/content/items/{1}/package'.format(org_url[org_url.find('://') + 1:], new_item['id'])
                code_attachment_properties = {'title': new_item['title'], 'type': 'Code Attachment', 'typeKeywords': 'Code,Web Mapping Application,Javascript',
                                              'relationshipType': 'WMA2Code', 'originItemId': new_item['id'], 'url': url}
                target.content.add(item_properties=code_attachment_properties, folder=_deep_get(folder, 'title'))

            return [new_item]
        except Exception as ex:
            raise _ItemCreateException("Failed to create {0} {1}: {2}".format(original_item['type'], original_item['title'], str(ex)), new_item)

class _FormDefinition(_ItemDefinition):
    """
    Represents the definition of a form within ArcGIS Online or Portal.
    """

    def __init__(self, info, related_items, data=None, sharing=None, thumbnail=None, portal_item=None):
        self._related_items = related_items
        super().__init__(info, data, sharing, thumbnail, portal_item)

    @property
    def related_items(self):
        """Gets the related items for the survey"""
        return self._related_items

    def clone(self, target, folder, item_mapping):
        """Clone the form in the target organization.

        Keyword arguments:
        target - The instance of arcgis.gis.GIS (the portal) to clone the form to
        folder - The folder to create the item in
        item_mapping - Dictionary containing mapping between new and old items.
        """
        try:
            new_item = None
            original_item = self.info

            # Get the item properties from the original item to be applied when the new item is created
            item_properties = self._get_item_properties()

            thumbnail = self.thumbnail
            if not thumbnail and self.portal_item:
                temp_dir = os.path.join(_TEMP_DIR.name, original_item['id'])
                if not os.path.exists(temp_dir):
                    os.makedirs(temp_dir)
                thumbnail = self.portal_item.download_thumbnail(temp_dir)

            # Add the new item
            new_item = target.content.add(item_properties=item_properties, data=None, thumbnail=thumbnail, folder=_deep_get(folder, 'title'))
            return [new_item]
        except Exception as ex:
            raise _ItemCreateException("Failed to create {0} {1}: {2}".format(original_item['type'], original_item['title'], str(ex)), new_item)

    def update_form(self, target, new_item, item_mapping):
        """Update the form with the form zip data in the target organization.

        Keyword arguments:
        target - The instance of arcgis.gis.GIS (the portal) to update the item
        new_item - The form item to update
        item_mapping - Dictionary containing mapping between new and old items.
        """
        original_item = self.info
        temp_dir = os.path.join(_TEMP_DIR.name, original_item['id'])
        if not os.path.exists(temp_dir):
            os.makedirs(temp_dir)
        form_zip = self.portal_item.download(temp_dir)
        zip_file = zipfile.ZipFile(form_zip)

        try:
            # Extract the zip archive to a subfolder
            new_dir = os.path.join(temp_dir, 'extract')
            zip_dir = os.path.join(new_dir, 'esriinfo')
            zip_file.extractall(new_dir)
            zip_file.close()

            feature_service_url = None
            form_json = None

            # Loop through the files and update references to the feature service and item id
            for path in os.listdir(zip_dir):
                if os.path.splitext(path)[1].lower() == '.info':
                    with open(os.path.join(zip_dir, path), 'r') as file:
                        data = json.loads(file.read())
                    original_url = data['serviceInfo']['url']
                    for key, value in item_mapping['FeatureServices'].items():
                        if _compare_url(original_url, key):
                            data['serviceInfo']['itemId'] = value['id']
                            data['serviceInfo']['url'] = value['url']
                            feature_service_url = value['url']
                            break
                    with open(os.path.join(zip_dir, path), 'w') as file:
                        file.write(json.dumps(data))
                elif os.path.splitext(path)[1].lower() == '.xml' or path.lower() == 'webform.json':
                    with open(os.path.join(zip_dir, path), 'r') as file:
                        data = file.read()
                    data = data.replace(original_item['id'], new_item['id'])
                    for key, value in item_mapping['FeatureServices'].items():
                        data = re.sub(key, value['url'], data, 0, re.IGNORECASE)
                    with open(os.path.join(zip_dir, path), 'w') as file:
                        file.write(data)
                elif os.path.splitext(path)[1].lower() == '.iteminfo':
                    with open(os.path.join(zip_dir, path), 'w') as file:
                        file.write(json.dumps(dict(new_item)))
                elif path.lower() == 'form.json':
                    with open(os.path.join(zip_dir, path), 'r') as file:
                        form_json = file.read()
                elif os.path.splitext(path)[1].lower() == '.xlsx':
                    xlsx = zipfile.ZipFile(os.path.join(zip_dir, path))
                    xlsx_dir = os.path.join(zip_dir, 'xlsx')
                    try:
                        xlsx.extractall(xlsx_dir)
                        xlsx.close()
                        with open(os.path.join(xlsx_dir, 'xl/sharedStrings.xml'), 'r') as file:
                            data = file.read()
                        for key, value in item_mapping['FeatureServices'].items():
                            data = re.sub(key, value['url'], data, 0, re.IGNORECASE)
                        with open(os.path.join(xlsx_dir, 'xl/sharedStrings.xml'), 'w') as file:
                            file.write(data)
                        xlsx = zipfile.ZipFile(os.path.join(zip_dir, path), 'w', zipfile.ZIP_DEFLATED)
                        _zip_dir(xlsx_dir, xlsx, False)
                    except Exception:
                        continue
                    finally:
                        xlsx.close()
                        if os.path.exists(xlsx_dir):
                            shutil.rmtree(xlsx_dir)

            # Add a relationship between the new survey and the service
            for related_item in self.related_items:
                for key, value in item_mapping['FeatureServices'].items():
                    if _compare_url(related_item['url'], key):
                        feature_service = target.content.get(value['id'])
                        _add_relationship(new_item, feature_service, 'Survey2Service')
                        break

            # If the survey was authored on the web add the web_json to the metadata table in the service
            if form_json is not None and feature_service_url is not None:
                svc = FeatureLayerCollection(feature_service_url, target)
                table = next((t for t in svc.tables if t.properties.name == 'metadata'), None)
                if table is not None:
                    deletes = table.query(where="name='form'")
                    table.edit_features(adds=[{'attributes': {'name': 'form', 'value': form_json}}], deletes=deletes)

            # Zip the directory
            zip_file = zipfile.ZipFile(form_zip, 'w', zipfile.ZIP_DEFLATED)
            _zip_dir(zip_dir, zip_file)
            zip_file.close()

            # Upload the zip to the item
            new_item.update(data=form_zip)
        except Exception as ex:
            raise Exception("Failed to update {0} {1}: {2}".format(new_item['type'], new_item['title'], str(ex)))
        finally:
            zip_file.close()

class _WorkforceProjectDefinition(_TextItemDefinition):
    """
    Represents the definition of a workforce project within ArcGIS Online or Portal.
    """

    def clone(self, target, folder, item_mapping):
        """Clone the workforce project in the target organization.

        Keyword arguments:
        target - The instance of arcgis.gis.GIS (the portal) to clone the workforce project to
        folder - The folder to create the item in
        item_mapping - Dictionary containing mapping between new and old items.
        """
        try:
            new_item = None
            original_item = self.info

            # Get the item properties from the original application which will be applied when the new item is created
            item_properties = self._get_item_properties()
            workforce_json = self.data

            # Update the web map references
            webmaps = ['workerWebMapId', 'dispatcherWebMapId']
            for webmap in webmaps:
                original_id = _deep_get(workforce_json, webmap)
                if original_id is not None and original_id in item_mapping['ItemIDs']:
                    workforce_json[webmap] = item_mapping['ItemIDs'][original_id]

            # Update the service references
            services = ['dispatchers', 'assignments', 'workers', 'tracks']
            for service in services:
                service_definiton = _deep_get(workforce_json, service)
                if service_definiton is not None:
                    layer_url = _deep_get(service_definiton, 'url')
                    feature_service_url = os.path.dirname(layer_url)
                    for key, value in item_mapping['FeatureServices'].items():
                        if _compare_url(feature_service_url, key):
                            layer_id = int(os.path.basename(layer_url))
                            new_id = value['layer_id_mapping'][layer_id]
                            service_definiton['url'] = "{0}/{1}".format(value['url'], new_id)
                            service_definiton['serviceItemId'] = value['id']
                            if service == 'dispatchers':
                                feature_layer = FeatureLayer(service_definiton['url'], target)
                                user = target.users.me
                                features = feature_layer.query("userId='{0}'".format(user.username)).features
                                if len(features) == 0:
                                    features = [{"attributes": {"name": user.fullName, "userId": user.username}}]
                                    feature_layer.edit_features(adds=features)
                            break

            # Update the group reference
            group_id = _deep_get(workforce_json, 'groupId')
            workforce_json['groupId'] = item_mapping['GroupIDs'][group_id]

            # Update the folder reference
            workforce_json['folderId'] = folder['id']

            # Update the application integration references
            integrations = _deep_get(workforce_json, 'assignmentIntegrations')
            if integrations is not None:
                for integration in integrations:
                    url_template = _deep_get(integration, 'urlTemplate')
                    if url_template is not None:
                        item_references = re.findall('itemID=[0-9A-F]{32}', url_template, re.IGNORECASE)
                        for item_reference in item_references:
                            item_id = item_reference[7:]
                            if item_id in item_mapping['ItemIDs']:
                                integration['urlTemplate'] = url_template.replace(item_id, item_mapping['ItemIDs'][item_id])

            # Add the project to the target portal
            item_properties['text'] = json.dumps(workforce_json)
            thumbnail = self.thumbnail
            if not thumbnail and self.portal_item:
                temp_dir = os.path.join(_TEMP_DIR.name, original_item['id'])
                if not os.path.exists(temp_dir):
                    os.makedirs(temp_dir)
                thumbnail = self.portal_item.download_thumbnail(temp_dir)
            new_item = target.content.add(item_properties=item_properties, thumbnail=thumbnail, folder=_deep_get(folder, 'title'))
            return [new_item]
        except Exception as ex:
            raise _ItemCreateException("Failed to create {0} {1}: {2}".format(original_item['type'], original_item['title'], str(ex)), new_item)

class _ProMapDefinition(_ItemDefinition):
    """
    Represents the definition of a Pro map within ArcGIS Online or Portal.
    """

    def clone(self, target, folder, item_mapping):
        """Clone the Pro map in the target organization.

        Keyword arguments:
        target - The instance of arcgis.gis.GIS (the portal) to clone the Pro map to
        folder - The folder to create the item in
        item_mapping - Dictionary containing mapping between new and old items.
        """
        try:
            new_item = None
            original_item = self.info
            mapx = self.data
            map_json = None
            with open(mapx, 'r') as file:
                map_json = json.loads(file.read())

            data_connections = []
            layer_definitions = _deep_get(map_json, 'layerDefinitions')
            if layer_definitions is not None:
                for layer_definition in layer_definitions:
                    data_connection = _deep_get(layer_definition, 'featureTable', 'dataConnection')
                    if data_connection is not None:
                        data_connections.append(data_connection)

            table_definitions = _deep_get(map_json, 'tableDefinitions')
            if table_definitions is not None:
                for table_definition in table_definitions:
                    data_connection = _deep_get(table_definition, 'dataConnection')
                    if data_connection is not None:
                        data_connections.append(data_connection)

            for data_connection in data_connections:
                if 'workspaceFactory' in data_connection and data_connection['workspaceFactory'] == 'FeatureService':
                    if 'workspaceConnectionString' in data_connection and data_connection['workspaceConnectionString'] is not None:
                        feature_service_url = data_connection['workspaceConnectionString'][4:]
                        for original_url in item_mapping['FeatureServices']:
                            if _compare_url(feature_service_url, original_url):
                                new_service = item_mapping['FeatureServices'][original_url]
                                layer_id = int(data_connection['dataset'])
                                new_id = new_service['layer_id_mapping'][layer_id]
                                data_connection['workspaceConnectionString'] = "URL={0}".format(new_service['url'])
                                data_connection['dataset'] = new_id

            new_mapx_dir = os.path.join(os.path.dirname(mapx), 'new_mapx')
            os.makedirs(new_mapx_dir)
            new_mapx = os.path.join(new_mapx_dir, os.path.basename(mapx))
            with open(new_mapx, 'w') as file:
                file.write(json.dumps(map_json))
            self._data = new_mapx
            return super().clone(target, folder, item_mapping)
        except Exception as ex:
            if isinstance(ex, _ItemCreateException):
                raise
            raise _ItemCreateException("Failed to create {0} {1}: {2}".format(original_item['type'], original_item['title'], str(ex)), new_item)
        finally:
            self._data = mapx
            new_mapx_dir = os.path.join(os.path.dirname(mapx), 'new_mapx')
            if os.path.exists(new_mapx_dir):
                shutil.rmtree(new_mapx_dir)

class _ProProjectPackageDefinition(_ItemDefinition):
    """
    Represents the definition of a Pro project package within ArcGIS Online or Portal.
    """

    def clone(self, target, folder, item_mapping):
        """Clone the Pro project package in the target organization.

        Keyword arguments:
        target - The instance of arcgis.gis.GIS (the portal) to clone the Pro project package to
        folder - The folder to create the item in
        item_mapping - Dictionary containing mapping between new and old items.
        """
        try:
            new_item = None
            aprx = None
            map = None
            maps = None
            layers = None
            lyr = None
            original_item = self.info
            ppkx = self.data

            if _COPY_ONLY_TAG not in original_item['tags']:
                try:
                    import arcpy
                    extract_dir = os.path.join(os.path.dirname(ppkx), 'extract')
                    if not os.path.exists(extract_dir):
                        os.makedirs(extract_dir)
                    arcpy.ExtractPackage_management(ppkx, extract_dir)

                    # 1.x versions of Pro use a different folder name
                    project_folder = 'p20'
                    version = arcpy.GetInstallInfo()['Version']
                    if version.startswith('1'):
                        project_folder = 'p12'

                    project_dir = os.path.join(extract_dir, project_folder)
                    if os.path.exists(project_dir):
                        aprx_files = [f for f in os.listdir(project_dir) if f.endswith('.aprx')]
                        if len(aprx_files) == 1:
                            service_version_infos = {}
                            aprx_file = os.path.join(project_dir, aprx_files[0])
                            aprx = arcpy.mp.ArcGISProject(aprx_file)
                            maps = aprx.listMaps()
                            for map in maps:
                                layers = [l for l in map.listLayers() if l.supports('connectionProperties')]
                                layers.extend(map.listTables())
                                for lyr in layers:
                                    connection_properties = lyr.connectionProperties
                                    workspace_factory = _deep_get(connection_properties, 'workspace_factory')
                                    service_url = _deep_get(connection_properties, 'connection_info', 'url')
                                    if workspace_factory == 'FeatureService' and service_url is not None:
                                        for original_url in item_mapping['FeatureServices']:
                                            if _compare_url(service_url, original_url):
                                                new_service = item_mapping['FeatureServices'][original_url]
                                                layer_id = int(connection_properties['dataset'])
                                                new_id = new_service['layer_id_mapping'][layer_id]
                                                new_connection_properties = copy.deepcopy(connection_properties)
                                                new_connection_properties['connection_info']['url'] = new_service['url']
                                                new_connection_properties['dataset'] = str(new_id)
                                                if new_service['url'] not in service_version_infos:
                                                    try:
                                                        service_version_infos[new_service['url']] = _get_version_management_server(target, new_service['url'])
                                                    except:
                                                        service_version_infos[new_service['url']] = {}
                                                version_info = service_version_infos[new_service['url']]
                                                for key, value in {'defaultVersionName': 'version', 'defaultVersionGuid': 'versionguid'}.items():
                                                    if key in version_info:
                                                        new_connection_properties['connection_info'][value] = version_info[key]
                                                    elif value in new_connection_properties['connection_info']:
                                                        del new_connection_properties['connection_info'][value]
                                                lyr.updateConnectionProperties(connection_properties, new_connection_properties, validate=False)

                            aprx.save()
                            additional_files = None
                            user_data = os.path.join(os.path.dirname(ppkx), 'extract', 'commondata', 'userdata')
                            if os.path.exists(user_data):
                                additional_files = [os.path.join(user_data, f) for f in os.listdir(user_data)]
                            new_package_dir = os.path.join(os.path.dirname(ppkx), 'new_package')
                            os.makedirs(new_package_dir)
                            new_package = os.path.join(new_package_dir, os.path.basename(ppkx))
                            item_properties = self._get_item_properties()
                            description = original_item['title']
                            if item_properties['snippet'] is not None:
                                description = item_properties['snippet']
                            arcpy.management.PackageProject(aprx_file, new_package, "INTERNAL", "PROJECT_PACKAGE", "DEFAULT", "ALL", additional_files, description, item_properties['tags'], "ALL")
                            self._data = new_package
                except ImportError:
                    pass

            return super().clone(target, folder, item_mapping)
        except Exception as ex:
            if isinstance(ex, _ItemCreateException):
                raise
            raise _ItemCreateException("Failed to create {0} {1}: {2}".format(original_item['type'], original_item['title'], str(ex)), new_item)
        finally:
            del aprx, map, maps, layers, lyr
            self._data = ppkx
            extract_dir = os.path.join(os.path.dirname(ppkx), 'extract')
            if os.path.exists(extract_dir):
                shutil.rmtree(extract_dir)
            new_package_dir = os.path.join(os.path.dirname(ppkx), 'new_package')
            if os.path.exists(new_package_dir):
                shutil.rmtree(new_package_dir)

class _ItemCreateException(Exception):
    """
    Exception raised during the creation of new items, used to clean-up any partially created items in the process.
    """
    pass


class _CustomCancelException(Exception):
    """Custom exception for geoprocessing tool cancellations"""
    pass

# endregion

# region Public API Functions
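
# Example usage (an illustrative sketch; the URLs, credentials, and item id below are
# placeholders):
#
#   from arcgis.gis import GIS
#   source = GIS("https://www.arcgis.com", "source_username", "source_password")
#   target = GIS("https://myportal.example.com/portal", "target_username", "target_password")
#   item = source.content.get("<id of the web map or application to clone>")
#   cloned_items = clone(target, item, folder_name="Cloned Items")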

def clone(target, item, folder_name=None, existing_items=[]):
    """Clone an item to a portal. If a web map or application is passed in, all services and groups that support the application or web map will also be cloned.

    Keyword arguments:
    target - The instance of arcgis.gis.GIS (the organization) to clone the items to.
    item - The arcgis.GIS.Item to clone.
    folder_name - The name of the folder to clone the new items to. If the folder does not already exist it will be created.
    existing_items - A collection of items already cloned that should be reused rather than creating a new version of the item in the org.
                     These items are used first before searching the org if the SEARCH_ORG_FOR_EXISTING_ITEMS variable is set to true.
    """
item_mapping={'ItemIDs':{},'GroupIDs':{},'FeatureServices':{}}
created_items=[]
global_TEMP_DIR
_TEMP_DIR=tempfile.TemporaryDirectory()
try:
#Checkiftheitemhasalreadybeenclonedintothetargetportal
existing_item=_get_existing_item(existing_items,item)
ifSEARCH_ORG_FOR_EXISTING_ITEMSandexisting_itemisNone:
existing_item=_search_org_for_existing_item(target,item)
ifexisting_itemisnotNone:
_add_message("{0}alreadyexistsinOrganization".format(item['title']))
_add_message("Newitemid:{0}".format(existing_item['id']))
_add_message('------------------------')
returncreated_items
        user = target.users.me
        folder = None
        if folder_name is not None:
            folders = user.folders
            folder = next((f for f in folders if f['title'].lower() == folder_name.lower()), None)
            if folder is None:
                folder = target.content.create_folder(folder_name)

        # Check if the tool has been cancelled before fetching item definitions
        _check_cancel_status()

        # Get the definitions associated with the item
        item_definitions = []
        _get_item_definitions(item, item_definitions)
        item_definitions = sorted(item_definitions, key=_sort_item_types)

        # Test if the user has the correct privileges to create the items requested
        if 'privileges' in user and user['privileges'] is not None:
            privileges = user.privileges
            for item_definition in item_definitions:
                if isinstance(item_definition, _ItemDefinition):
                    if 'portal:user:createItem' not in privileges:
                        raise Exception("To create this item you must have permission to create new content in the target organization.")
                if isinstance(item_definition, _GroupDefinition):
                    if 'portal:user:createGroup' not in privileges or 'portal:user:shareToGroup' not in privileges:
                        raise Exception("To create this item you must have permission to create new groups and share content to groups in the target organization.")
                if isinstance(item_definition, _FeatureServiceDefinition):
                    if 'portal:publisher:publishFeatures' not in privileges:
                        raise Exception("To create this item you must have permission to publish hosted feature layers in the target organization.")

        # Clone the groups
        for group in [g for g in item_definitions if isinstance(g, _GroupDefinition)]:
            _check_cancel_status()
            item_definitions.remove(group)
            original_group = group.info
            new_group = _get_existing_item(existing_items, original_group, "Group")
            if SEARCH_ORG_FOR_EXISTING_ITEMS and new_group is None:
                new_group = _search_for_existing_group(user, original_group)
            if not new_group:
                new_group = group.clone(target)
                created_items.append(new_group)
                _add_message("Created Group {0}".format(new_group['title']))
            else:
                _add_message("Existing Group {0} found in Organization".format(new_group['title']))
            item_mapping['Group IDs'][original_group['id']] = new_group['id']
        # Clone the items
        for item_definition in item_definitions:
            _check_cancel_status()
            original_item = item_definition.info
            new_item_created = False
            result = []
            new_item = _get_existing_item(existing_items, original_item)
            if new_item is None:
                if _TARGET_MUST_EXIST_TAG in original_item['tags']:
                    new_item = _search_org_for_existing_item(target, original_item)
                    if new_item is None:
                        raise Exception("Failed to find {0} {1} in Organization".format(original_item['type'], original_item['title']))
                elif SEARCH_ORG_FOR_EXISTING_ITEMS:
                    new_item = _search_org_for_existing_item(target, original_item)
            if not new_item:
                result = item_definition.clone(target, folder, item_mapping)
                new_item = result[0]
                new_item_created = True
                created_items.append(new_item)
                _add_message("Created {0} {1}".format(new_item['type'], new_item['title']))
            else:
                _add_message("Existing {0} {1} found in Organization".format(original_item['type'], original_item['title']))
            if new_item['owner'] == user['username']:
                _share_item_with_groups(new_item, item_definition.sharing, item_mapping['Group IDs'])
            item_mapping['Item IDs'][original_item['id']] = new_item['id']

            if isinstance(item_definition, _ApplicationDefinition):
                # With Portal, sometimes after sharing the application the url is reset.
                # Check if the url is incorrect after sharing and set it back to the correct url.
                if 'url' in new_item and new_item['url'] is not None:
                    url = new_item['url']
                    new_item = target.content.get(new_item['id'])
                    if new_item['url'] != url:
                        new_item.update({'url': url})
            if isinstance(item_definition, _FeatureServiceDefinition):
                # Need to handle Feature Services specially as their layer ids and field names can
                # change during creation, especially when going from Online to Portal.
                layer_field_mapping = {}
                layer_id_mapping = {}
                layer_fields = {}
                relationship_field_mapping = {}
                if not new_item_created:
                    new_feature_service = FeatureLayerCollection.fromitem(new_item)
                    new_layers = new_feature_service.layers + new_feature_service.tables
                    for new_layer in new_layers:
                        layer_fields[new_layer.properties.id] = new_layer.properties.fields
                    original_layers_definition = item_definition.layers_definition
                    original_layers = original_layers_definition['layers'] + original_layers_definition['tables']
                    if len(original_layers) > len(new_layers):
                        raise Exception('{0} {1} layers and tables must match the source {0}'.format(new_item['type'], new_item['title']))

                    # Get a mapping between layer ids, fields and related fields
                    original_layer_ids = [original_layer['id'] for original_layer in original_layers]
                    new_layer_ids = [new_layer.properties['id'] for new_layer in new_layers]
                    new_layer_names = [new_layer.properties['name'] for new_layer in new_layers]
                    for layer in original_layers:
                        try:
                            new_layer = new_layers[new_layer_names.index(layer['name'])]
                            layer_id_mapping[layer['id']] = new_layer.properties['id']
                            new_layer_ids.remove(new_layer.properties['id'])
                            original_layer_ids.remove(layer['id'])
                        except ValueError:
                            pass
                    for id in original_layer_ids:
                        layer_id_mapping[id] = new_layer_ids.pop(0)

                    for original_id, new_id in layer_id_mapping.items():
                        field_mapping = {}
                        for layer in original_layers:
                            if layer['id'] == original_id:
                                new_layer = next((l for l in new_layers if l.properties['id'] == new_id), None)
                                original_fields = _deep_get(layer, 'fields')
                                new_fields = _deep_get(new_layer.properties, 'fields')
                                if new_fields is None or original_fields is None:
                                    continue
                                new_fields_lower = [f['name'].lower() for f in new_fields]

                                if 'editFieldsInfo' in layer and layer['editFieldsInfo'] is not None:
                                    if 'editFieldsInfo' in new_layer.properties and new_layer.properties['editFieldsInfo'] is not None:
                                        for editor_field in ['creationDateField', 'creatorField', 'editDateField', 'editorField']:
                                            original_editor_field_name = _deep_get(layer, 'editFieldsInfo', editor_field)
                                            new_editor_field_name = _deep_get(new_layer.properties, 'editFieldsInfo', editor_field)
                                            if original_editor_field_name != new_editor_field_name:
                                                if original_editor_field_name is not None and original_editor_field_name != "" and new_editor_field_name is not None and new_editor_field_name != "":
                                                    field_mapping[original_editor_field_name] = new_editor_field_name

                                original_oid_field = _deep_get(layer, 'objectIdField')
                                new_oid_field = _deep_get(new_layer.properties, 'objectIdField')
                                if original_oid_field != new_oid_field:
                                    if original_oid_field is not None and original_oid_field != "" and new_oid_field is not None and new_oid_field != "":
                                        field_mapping[original_oid_field] = new_oid_field

                                original_globalid_field = _deep_get(layer, 'globalIdField')
                                new_globalid_field = _deep_get(new_layer.properties, 'globalIdField')
                                if original_globalid_field != new_globalid_field:
                                    if original_globalid_field is not None and original_globalid_field != "" and new_globalid_field is not None and new_globalid_field != "":
                                        field_mapping[original_globalid_field] = new_globalid_field

                                for field in original_fields:
                                    if field['name'] in field_mapping:
                                        continue
                                    try:
                                        new_field = new_fields[new_fields_lower.index(field['name'].lower())]
                                        if field['name'] != new_field['name']:
                                            field_mapping[field['name']] = new_field['name']
                                    except ValueError:
                                        pass
                                break
                        if len(field_mapping) > 0:
                            layer_field_mapping[original_id] = field_mapping

                    for layer in original_layers:
                        layer_id = layer['id']
                        if 'relationships' in layer and layer['relationships'] is not None:
                            for relationship in layer['relationships']:
                                related_table_id = relationship['relatedTableId']
                                if related_table_id in layer_field_mapping:
                                    if layer_id not in relationship_field_mapping:
                                        relationship_field_mapping[layer_id] = {}
                                    field_mapping = layer_field_mapping[related_table_id]
                                    relationship_field_mapping[layer_id][relationship['id']] = field_mapping
                else:
                    layer_field_mapping = result[1]
                    layer_id_mapping = result[2]
                    layer_fields = result[3]
                    relationship_field_mapping = result[4]

                item_mapping['Feature Services'][original_item['url']] = {'id': new_item['id'], 'url': new_item['url'], 'layer_field_mapping': layer_field_mapping, 'layer_id_mapping': layer_id_mapping, 'layer_fields': layer_fields, 'relationship_field_mapping': relationship_field_mapping}
        # Update form data
        for form in [i for i in item_definitions if isinstance(i, _FormDefinition)]:
            _check_cancel_status()
            original_item = form.info
            new_item = _get_existing_item(created_items, original_item)
            if new_item is not None:
                form.update_form(target, new_item, item_mapping)
                _add_message("Updated Form {0}".format(new_item['title']))

        existing_item = _search_org_for_existing_item(target, item)
        _add_message('Successfully added {0}'.format(item['title']))
        if existing_item:
            _add_message("New item id: {0}".format(existing_item['id']))
        _add_message('------------------------')
        return created_items
    except Exception as ex:
        if isinstance(ex, _ItemCreateException):
            _add_message(ex.args[0], 'Error')
            if isinstance(ex.args[1], (gis.Item, gis.Group)):
                created_items.append(ex.args[1])
        elif isinstance(ex, _CustomCancelException):
            if len(ex.args) > 0 and isinstance(ex.args[0], (gis.Item, gis.Group)):
                created_items.append(ex.args[0])
            _add_message("{0} canceled".format(item['title']), 'Error')
        else:
            _add_message(str(ex), 'Error')
        for created_item in reversed(created_items):
            try:
                if created_item is not None:
                    if created_item.delete():
                        if isinstance(created_item, gis.Group):
                            created_item['type'] = 'Group'
                        _add_message("Deleted {0} {1}".format(created_item['type'], created_item['title']))
            except Exception:
                continue
        _add_message('Failed to add {0}'.format(item['title']), 'Error')
        _add_message('------------------------')
        return []
    finally:
        _TEMP_DIR.cleanup()
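
# Example: a minimal sketch of how clone() above might be called, assuming hypothetical
# org URLs, credentials and item id; wrapped in a function so nothing runs on import.
def _example_clone_usage():
    source = gis.GIS("https://source.maps.arcgis.com", "source_user", "source_password")     # hypothetical
    target = gis.GIS("https://target.example.com/portal", "target_user", "target_password")  # hypothetical
    item = source.content.get("<source item id>")                                            # hypothetical item id
    created_items = clone(target, item, folder_name="Cloned Content")
    # clone() returns every item and group it created (an empty list on failure).
    for created in created_items:
        print(created['title'], created['id'])
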
#endregion
#region Private API Functions
def _get_item_definitions(item, item_definitions):
    """Get a list of definitions for the specified item.
    This method differs from get_item_definition in that it is run recursively to return the definitions of feature service items that make up a web map and the groups and web maps that make up an application.
    These definitions can be used to clone or download the items.
    Keyword arguments:
    item - The arcgis.GIS.Item to get the definition for
    item_definitions - A list of item and group definitions. When first called this should be an empty list that you hold a reference to and all definitions related to the item will be appended to the list.
    (A usage sketch follows this function.)
    """
item_definition=None
source=item._gis
#Checkiftheitemdefinitionhasalreadybeenaddedtothecollectionofitemdefinitions
item_definition=next((iforiinitem_definitionsifi.info['id']==item.id),None)
ifitem_definition:
returnitem_definition
#iftheitemisagroupfindallthewebmapsthataresharedwiththegroup
ifisinstance(item,gis.Group):
item_definition=_get_group_definition(item)
item_definitions.append(item_definition)
group_id=item['id']
search_query='group:{0}'.format(group_id)
group_items=source.content.search(search_query,max_items=1000)
forgroup_itemingroup_items:
item_definition=_get_item_definitions(group_item,item_definitions)
ifitem_definitionisnotNone:
item_definition.sharing['groups'].append(group_id)
    # If the item has the copy-only tag then we don't need to do a deep clone, just copy the item as is
elif_COPY_ONLY_TAGinitem['tags']:
item_definition=_get_item_definition(item)
item_definitions.append(item_definition)
    # If the item is an application or dashboard find the web map or group that the application is referencing
elifitem['type']in['WebMappingApplication','OperationView','Dashboard']:
item_definition=_get_item_definition(item)
item_definitions.append(item_definition)
webmap_ids=[]
app_json=item_definition.data
ifapp_jsonisnotNone:
if'StoryMap'initem['typeKeywords']or'StoryMaps'initem['typeKeywords']:
webmap_ids=[]
elifitem['type']in["OperationView","Dashboard"]:#OperationsDashboard
if'widgets'inapp_json:
forwidgetinapp_json['widgets']:
ifwidget['type']=='mapWidget':
if'itemId'inwidget:
webmap_ids.append(widget['itemId'])
elif'mapId'inwidget:
webmap_ids.append(widget['mapId'])
elif"WebAppBuilder"initem['typeKeywords']:#WebAppBuilder
if'map'inapp_json:
if'itemId'inapp_json['map']:
webmap_ids.append(app_json['map']['itemId'])
else:#ConfigurableApplicationTemplate
if'values'inapp_json:
if'group'inapp_json['values']:
group_id=app_json['values']['group']
try:
group=source.groups.get(group_id)
exceptRuntimeError:
_add_message("Failedtogetgroup{0}".format(group_id),'Error')
raise
_get_item_definitions(group,item_definitions)
if'webmap'inapp_json['values']:
ifisinstance(app_json['values']['webmap'],list):
webmap_ids.extend(app_json['values']['webmap'])
else:
webmap_ids.append(app_json['values']['webmap'])
forwebmap_idinwebmap_ids:
try:
webmap=source.content.get(webmap_id)
exceptRuntimeError:
_add_message("Failedtogetwebmap{0}".format(webmap_id),'Error')
raise
_get_item_definitions(webmap,item_definitions)
#Iftheitemisawebmapfindallthefeatureservicelayersandtablesthatmakeupthemap
elifitem['type']=='WebMap':
item_definition=_get_item_definition(item)
item_definitions.append(item_definition)
webmap_json=item_definition.data
featurelayer_services=[]
feature_collections=[]
if'operationalLayers'inwebmap_json:
featurelayer_services+=[layerforlayerinwebmap_json['operationalLayers']if'layerType'inlayerandlayer['layerType']=="ArcGISFeatureLayer"and'url'inlayerandlayer['url']isnotNoneand('type'notinlayerorlayer['type']!="FeatureCollection")]
feature_collections+=[layerforlayerinwebmap_json['operationalLayers']if'layerType'inlayerandlayer['layerType']=="ArcGISFeatureLayer"and'type'inlayerandlayer['type']=="FeatureCollection"]
if'tables'inwebmap_json:
featurelayer_services+=[tablefortableinwebmap_json['tables']if'url'intable]
forlayerinfeaturelayer_services:
service_url=os.path.dirname(layer['url'])
feature_service=next((definitionfordefinitioninitem_definitionsif'url'indefinition.infoand_compare_url(definition.info['url'],service_url)),None)
ifnotfeature_service:
feature_service=_get_feature_service_related_item(service_url,source)
iffeature_service:
_get_item_definitions(feature_service,item_definitions)
forfeature_collectioninfeature_collections:
if'itemId'infeature_collectionandfeature_collection['itemId']isnotNone:
feature_collection=source.content.get(feature_collection['itemId'])
_get_item_definitions(feature_collection,item_definitions)
    # If the item is a feature service determine if it is a view, and if it is find all its sources
elifitem['type']=='FeatureService':
svc=FeatureLayerCollection.fromitem(item)
service_definition=dict(svc.properties)
is_view=False
if"isView"inservice_definitionandservice_definition["isView"]isnotNone:
is_view=service_definition["isView"]
    # Get the definitions of the layers and tables
layers_definition={'layers':[],'tables':[]}
forlayerinsvc.layers:
layers_definition['layers'].append(dict(layer.properties))
fortableinsvc.tables:
layers_definition['tables'].append(dict(table.properties))
#Gettheitemdata,forexampleanypopupdefinitionassociatedwiththeitem
data=item.get_data()
#Processthefeatureserviceifitisaview
view_sources={}
view_source_fields={}
ifis_view:
try:
multiple_source_error="Viewsbasedonmultiplesourcelayersarenotsupported."
sources=source._portal.con.get(svc.url+'/sources')
iflen(sources['services'])!=1:
raiseException(multiple_source_error)
source_service=sources['services'][0]
source_item=source.content.get(source_service['serviceItemId'])
_get_item_definitions(source_item,item_definitions)
forlayerinsvc.layers+svc.tables:
layer_sources=source._portal.con.get(svc.url+'/'+str(layer.properties['id'])+'/sources')
layer_source=None
if'layers'inlayer_sourcesandlen(layer_sources['layers'])==1:
layer_source=layer_sources['layers'][0]
elif'tables'inlayer_sourcesandlen(layer_sources['tables'])==1:
layer_source=layer_sources['tables'][0]
else:
raiseException(multiple_source_error)
view_sources[layer.properties['id']]=layer_source['url']
feature_layer=FeatureLayer(layer_source['url'],source)
view_source_fields[layer.properties['id']]=feature_layer.properties.fields
exceptRuntimeError:
_add_message("Failedtogetfeaturelayerviewanditssources{0}".format(item['id']),'Error')
raise
item_definition=_FeatureServiceDefinition(dict(item),service_definition,layers_definition,is_view,view_sources,view_source_fields,features=None,data=data,thumbnail=None,portal_item=item)
item_definitions.append(item_definition)
#Iftheitemisaworkforcefindthegroup,mapsandservicesthatsupporttheproject
elifitem['type']=='WorkforceProject':
workforce_json=item.get_data()
#Workforcegroup
group_id=_deep_get(workforce_json,'groupId')
group=source.groups.get(group_id)
item_definition=_get_group_definition(group)
item_definitions.append(item_definition)
#Processtheservices
services=['dispatchers','assignments','workers','tracks']
forserviceinservices:
item_id=_deep_get(workforce_json,service,'serviceItemId')
ifitem_idisnotNone:
service_item=source.content.get(item_id)
item_definition=_get_item_definitions(service_item,item_definitions)
item_definition.sharing['groups'].append(group_id)
#Processthewebmaps
web_maps=['workerWebMapId','dispatcherWebMapId']
forweb_mapinweb_maps:
item_id=_deep_get(workforce_json,web_map)
ifitem_idisnotNone:
web_map_item=source.content.get(item_id)
item_definition=_get_item_definitions(web_map_item,item_definitions)
item_definition.sharing['groups'].append(group_id)
#Handleanyappintegrations
integrations=_deep_get(workforce_json,'assignmentIntegrations')
ifintegrationsisnotNone:
forintegrationinintegrations:
url_template=_deep_get(integration,'urlTemplate')
ifurl_templateisnotNone:
item_ids=re.findall('itemID=[0-9A-F]{32}',url_template,re.IGNORECASE)
foritem_idinitem_ids:
integration_item=source.content.get(item_id[7:])
_get_item_definitions(integration_item,item_definitions)
item_definition=_get_item_definition(item)
item_definition.sharing['groups'].append(group_id)
item_definitions.append(item_definition)
#Iftheitemisaformfindthefeatureservicethatsupportsit
elifitem['type']=='Form':
item_definition=_get_item_definition(item)
item_definitions.append(item_definition)
forrelated_iteminitem_definition.related_items:
_get_item_definitions(source.content.get(related_item['id']),item_definitions)
#Iftheitemisapromapfindthefeatureservicesthatsupportsit
elifitem['type']=='ProMap':
item_definition=_get_item_definition(item)
item_definitions.append(item_definition)
map_json=None
withopen(item_definition.data,'r')asfile:
map_json=json.loads(file.read())
data_connections=[]
layer_definitions=_deep_get(map_json,'layerDefinitions')
iflayer_definitionsisnotNone:
forlayer_definitioninlayer_definitions:
data_connection=_deep_get(layer_definition,'featureTable','dataConnection')
ifdata_connectionisnotNone:
data_connections.append(data_connection)
table_definitions=_deep_get(map_json,'tableDefinitions')
iftable_definitionsisnotNone:
fortable_definitionintable_definitions:
data_connection=_deep_get(table_definition,'dataConnection')
ifdata_connectionisnotNone:
data_connections.append(data_connection)
fordata_connectionindata_connections:
if'workspaceFactory'indata_connectionanddata_connection['workspaceFactory']=='FeatureService':
if'workspaceConnectionString'indata_connectionanddata_connection['workspaceConnectionString']isnotNone:
service_url=data_connection['workspaceConnectionString'][4:]
feature_service=next((definitionfordefinitioninitem_definitionsif'url'indefinition.infoand_compare_url(definition.info['url'],service_url)),None)
ifnotfeature_service:
feature_service=_get_feature_service_related_item(service_url,source)
iffeature_service:
_get_item_definitions(feature_service,item_definitions)
#Iftheitemisaproprojectfindthefeatureservicesthatsupportsit
elifitem['type']=='ProjectPackage':
item_definition=_get_item_definition(item)
item_definitions.append(item_definition)
try:
importarcpy
ppkx=item_definition.data
extract_dir=os.path.join(os.path.dirname(ppkx),'extract')
ifnotos.path.exists(extract_dir):
os.makedirs(extract_dir)
arcpy.ExtractPackage_management(ppkx,extract_dir)
#1.xversionsofProuseadifferentfoldername
project_folder='p20'
version=arcpy.GetInstallInfo()['Version']
ifversion.startswith('1'):
project_folder='p12'
project_dir=os.path.join(extract_dir,project_folder)
ifos.path.exists(project_dir):
aprx_files=[fforfinos.listdir(project_dir)iff.endswith('.aprx')]
iflen(aprx_files)==1:
aprx_file=os.path.join(project_dir,aprx_files[0])
aprx=arcpy.mp.ArcGISProject(aprx_file)
maps=aprx.listMaps()
formapinmaps:
layers=[lforlinmap.listLayers()ifl.supports('connectionProperties')]
layers.extend(map.listTables())
forlyrinlayers:
connection_properties=lyr.connectionProperties
workspace_factory=_deep_get(connection_properties,'workspace_factory')
service_url=_deep_get(connection_properties,'connection_info','url')
ifworkspace_factory=='FeatureService'andservice_urlisnotNone:
feature_service=next((definitionfordefinitioninitem_definitionsif'url'indefinition.infoand_compare_url(definition.info['url'],service_url)),None)
ifnotfeature_service:
feature_service=_get_feature_service_related_item(service_url,source)
iffeature_service:
_get_item_definitions(feature_service,item_definitions)
exceptImportError:
pass
#Iftheitemisacodeattachmentignoreit
elifitem['type']=='CodeAttachment':
pass
    # For all other types we don't need to recursively look for related items
else:
item_definition=_get_item_definition(item)
item_definitions.append(item_definition)
returnitem_definition
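
# Example: a small sketch of how _get_item_definitions() above is typically used; the caller
# owns the (initially empty) list and then sorts it into clone order with _sort_item_types,
# so forms and feature services come before views, web maps and applications.
def _example_collect_definitions(item):
    item_definitions = []  # hold a reference; all related definitions are appended to it
    _get_item_definitions(item, item_definitions)
    return sorted(item_definitions, key=_sort_item_types)
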
def _get_group_definition(group):
    """Get an instance of the group definition for the specified item. This definition can be used to clone or download the group.
    Keyword arguments:
    group - The arcgis.GIS.Group to get the definition for."""
    return _GroupDefinition(dict(group), thumbnail=None, portal_group=group)

def _get_item_definition(item):
    """Get an instance of the corresponding definition class for the specified item. This definition can be used to clone or download the item.
    Keyword arguments:
    item - The arcgis.GIS.Item to get the definition for.
    """
    # If the item is an application or dashboard get the ApplicationDefinition
ifitem['type']in['WebMappingApplication','OperationView','Dashboard']:
app_json=None
source_app_title=None
update_url=False
try:
app_json=item.get_data()
exceptException:
pass#itemdoesn'thavejsondata
ifapp_jsonisnotNone:
update_url=True
if"WebAppBuilder"notinitem['typeKeywords']anditem['type']!='OperationView'and'source'inapp_json:
try:
source=item._gis
source_id=app_json['source']
app_item=source.content.get(source_id)
ifapp_itemisnotNone:
source_app_title=app_item['title']
exceptException:
pass
return_ApplicationDefinition(dict(item),source_app_title=source_app_title,update_url=update_url,data=app_json,thumbnail=None,portal_item=item)
#IftheitemisawebmapgettheWebMapDefintion
elifitem['type']=='WebMap':
webmap_json=item.get_data()
return_WebMapDefinition(dict(item),data=webmap_json,thumbnail=None,portal_item=item)
#IftheitemisaworkforceprojectgettheWorkforceProjectDefintion
elifitem['type']=='WorkforceProject':
workforce_json=item.get_data()
return_WorkforceProjectDefinition(dict(item),data=workforce_json,thumbnail=None,portal_item=item)
    # If the item is a form get the FormDefinition
elifitem['type']=='Form':
related_items=_get_related_items(item,'Survey2Service')
return_FormDefinition(dict(item),related_items=related_items,data=None,thumbnail=None,portal_item=item)
#IftheitemisafeatureservicegettheFeatureServiceDefintion
elifitem['type']=='FeatureService':
svc=FeatureLayerCollection.fromitem(item)
service_definition=dict(svc.properties)
    # Get the definitions of the layers and tables
layers_definition={'layers':[],'tables':[]}
forlayerinsvc.layers:
layers_definition['layers'].append(dict(layer.properties))
fortableinsvc.tables:
layers_definition['tables'].append(dict(table.properties))
#Gettheitemdata,forexampleanypopupdefinitionassociatedwiththeitem
data=item.get_data()
return_FeatureServiceDefinition(dict(item),service_definition,layers_definition,features=None,data=data,thumbnail=None,portal_item=item)
#IftheitemisafeaturecollectiongettheFeatureCollectionDefintion
elifitem['type']=='FeatureCollection':
return_FeatureCollectionDefinition(dict(item),data=item.get_data(),thumbnail=None,portal_item=item)
#IftheitemisapromapgettheProMapDefintion
elifitem['type']=='ProMap':
temp_dir=os.path.join(_TEMP_DIR.name,item['id'])
ifnotos.path.exists(temp_dir):
os.makedirs(temp_dir)
pro_map=item.download(temp_dir)
return_ProMapDefinition(dict(item),data=pro_map,thumbnail=None,portal_item=item)
#IftheitemisapropackagegettheProProjectPackageDefintion
elifitem['type']=='ProjectPackage':
temp_dir=os.path.join(_TEMP_DIR.name,item['id'])
ifnotos.path.exists(temp_dir):
os.makedirs(temp_dir)
pro_package=item.download(temp_dir)
return_ProProjectPackageDefinition(dict(item),data=pro_package,thumbnail=None,portal_item=item)
#Forallothertypesgetthecorrespondingdefinition
else:
ifitem['type']in_TEXT_BASED_ITEM_TYPES:
return_TextItemDefinition(dict(item),data=item.get_data(),thumbnail=None,portal_item=item)
return_ItemDefinition(dict(item),data=None,thumbnail=None,portal_item=item)
def _get_feature_service_related_item(service_url, source):
    try:
        service = FeatureLayerCollection(service_url, source)
    except Exception:
        _add_message("Feature layer {0} is not a hosted feature service. It will not be cloned.".format(service_url), 'Warning')
        return
    item_id = None
    if 'serviceItemId' not in service.properties or service.properties['serviceItemId'] is None:
        document_info = _deep_get(service.properties, 'documentInfo')
        if document_info is not None:
            item_ids = re.findall('serviceitemid:[0-9A-F]{32}', json.dumps(document_info), re.IGNORECASE)
            if len(item_ids) > 0:
                item_id = item_ids[0][len('serviceitemid:'):]
    else:
        item_id = service.properties['serviceItemId']
    try:
        if item_id is not None:
            return source.content.get(item_id)
        _add_message("Feature layer {0} is not a hosted feature service. It will not be cloned.".format(service_url), 'Warning')
    except RuntimeError:
        _add_message("Failed to get feature service item {0}".format(item_id), 'Error')
        raise

def _add_message(message, message_type='Info'):
    """Add a message to the output"""
    try:
        import arcpy
        if message_type == 'Info':
            arcpy.AddMessage(message)
        elif message_type == 'Warning':
            arcpy.AddWarning(message)
        elif message_type == 'Error':
            arcpy.AddError(message)
    except ImportError:
        print(message)
def _get_extent_definition(original_extent, item_extent, new_spatial_ref):
    """Get the extent definition for the feature service based on the user specified extent and spatial reference.
    This function requires the arcpy module to project the specified or default extent into the specified spatial reference.
    If the module is not available or if neither the ITEM_EXTENT nor SPATIAL_REFERENCE is specified the default service extent will be returned.
    extent - The json representation of an extent.
    (A usage sketch follows this function.)"""
try:
extent=original_extent
ifitem_extentisNoneandnew_spatial_refisNone:
returnextent
importarcpy
new_extent=original_extent
ifitem_extentisnotNone:
extent_list=item_extent.split(",")
new_extent={"xmin":extent_list[0],"ymin":extent_list[1],"xmax":extent_list[2],"ymax":extent_list[3],"spatialReference":{'wkid':4326}}
coordinates=[[new_extent['xmin'],new_extent['ymin']],
[new_extent['xmax'],new_extent['ymin']],
[new_extent['xmax'],new_extent['ymax']],
[new_extent['xmin'],new_extent['ymax']],
[new_extent['xmin'],new_extent['ymin']]]
original_sr=arcpy.SpatialReference()
if'wkid'innew_extent['spatialReference']:
original_sr=arcpy.SpatialReference(new_extent['spatialReference']['wkid'])
elif'wkt'innew_extent['spatialReference']:
original_sr.loadFromString(new_extent['spatialReference']['wkt'])
polygon=arcpy.Polygon(arcpy.Array([arcpy.Point(*coords)forcoordsincoordinates]),original_sr)
spatial_reference=original_extent['spatialReference']
ifnew_spatial_refisnotNone:
spatial_reference={'wkid':new_spatial_ref}
new_sr=arcpy.SpatialReference()
if'wkid'inspatial_reference:
new_sr=arcpy.SpatialReference(spatial_reference['wkid'])
elif'wkt'inspatial_reference:
new_sr.loadFromString(spatial_reference['wkt'])
extent_geometry=polygon.extent.projectAs(new_sr)
extent={"xmin":extent_geometry.XMin,"ymin":extent_geometry.YMin,"xmax":extent_geometry.XMax,"ymax":extent_geometry.YMax,"spatialReference":spatial_reference}
exceptImportError:
pass
returnextent
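
# Example: a sketch of calling _get_extent_definition() above with an override extent given as
# "xmin,ymin,xmax,ymax" in WGS84 (the format the ITEM_EXTENT setting expects) and a target WKID.
# The extent values below are hypothetical; arcpy must be available, otherwise the original
# extent is returned unchanged.
def _example_override_extent(original_extent):
    item_extent = "-122.55,37.60,-122.35,37.85"   # hypothetical extent around San Francisco
    new_wkid = 3857                               # project the extent to Web Mercator
    return _get_extent_definition(original_extent, item_extent, new_wkid)
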
def _search_org_for_existing_item(target, item):
    """Search for an item with a specific type keyword or tag.
    This is used to determine if the item has already been cloned in the folder.
    Keyword arguments:
    target - The portal that items will be cloned to.
    item - The original item used to determine if it has already been cloned to the specified folder."""
    search_query = 'typekeywords:source-{0} type:{1}'.format(item['id'], item['type'])
    items = target.content.search(search_query, max_items=100, outside_org=False)
    search_query = 'tags:source-{0} type:{1}'.format(item['id'], item['type'])
    items.extend(target.content.search(search_query, max_items=100, outside_org=False))
    existing_item = None
    if len(items) > 0:
        existing_item = max(items, key=lambda x: x['created'])
    return existing_item
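
# Example: a sketch of reusing an earlier clone. Items cloned by this script carry a
# "source-<original item id>" typekeyword (or tag), which is what the query in
# _search_org_for_existing_item() above looks for, so repeated runs reuse the clone.
def _example_find_existing_clone(target, original_item):
    existing = _search_org_for_existing_item(target, original_item)
    if existing is not None:
        print("Already cloned as {0}".format(existing['id']))
    return existing
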
def _search_for_existing_group(user, group):
    """Test if a group with a given source tag that the user is a member of already exists in the organization.
    This is used to determine if the group has already been created and if new maps and apps that belong to the same group should be shared to the same group.
    Keyword arguments:
    user - The gis.User to search through their group membership.
    group - The original group used to determine if it has already been cloned in the organization."""
    existing_group = None
    if 'groups' in user and user['groups'] is not None:
        groups = [g for g in user['groups'] if "source-{0}".format(group['id']) in g['tags']]
        if len(groups) > 0:
            existing_group = max(groups, key=lambda x: x['created'])
    return existing_group

def _get_existing_item(existing_items, item, item_type="Item"):
    """Find the cloned item from a list of items.
    Keyword arguments:
    existing_items - The list of items to search through to find if the item has already been cloned.
    item - The original item to be cloned.
    item_type - The type of item being searched, options are 'Item' or 'Group'"""
    if item_type == "Item":
        for existing_item in [i for i in existing_items if isinstance(i, gis.Item)]:
            if existing_item['type'] == item['type']:
                for keyword in existing_item['typeKeywords']:
                    if keyword == "source-{0}".format(item['id']):
                        return existing_item
    elif item_type == "Group":
        for existing_group in [i for i in existing_items if isinstance(i, gis.Group)]:
            for tag in existing_group['tags']:
                if tag == "source-{0}".format(item['id']):
                    return existing_group
    return None
def _share_item_with_groups(item, sharing, group_mapping):
    """Share the new item using the sharing properties of the original item and group mapping.
    Keyword arguments:
    item - The item to share
    sharing - The sharing properties of the original item
    group_mapping - A dictionary containing the id of the original group and the id of the new group"""
    if sharing:
        groups = []
        for group in sharing['groups']:
            if group in group_mapping:
                groups.append(group_mapping[group])
        if len(groups) == 0:
            return
        everyone = False
        org = False
        if 'access' in item and item['access'] is not None:
            everyone = item['access'] == 'public'
            org = item['access'] == 'org'
        item.share(everyone, org, ','.join(groups))
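
# Example: a sketch of sharing a newly cloned item to the cloned groups. item_definition.sharing
# is expected to carry a 'groups' list of original group ids (as _get_item_definitions() builds),
# and item_mapping['Group IDs'] maps each original group id to the id of its clone.
def _example_share(new_item, item_definition, item_mapping):
    _share_item_with_groups(new_item, item_definition.sharing, item_mapping['Group IDs'])
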
def_create_service(target,service_type,create_params,is_view,folder):
"""Createanewservice.
Keywordarguments:
target-Theinstanceofarcgis.gis.GIS(theportal)tocreatetheservice
service_type-Thetypeofservice
create_params-Theserviceparameters
is_view-Indicatesiftheserviceshouldbeaview
folder-thefoldertocreatetheservicein"""
portal=target._portal
postdata=portal._postdata()
owner=portal.logged_in_user()['username']
#Setuptheitempath,includingthefolder,andposttoit
path='content/users/'+owner
iffolderandfolder!='/':
folder_id=portal.get_folder_id(owner,folder)
path+='/'+folder_id
path+='/createService'
postdata['createParameters']=json.dumps(create_params)
postdata['outputType']=service_type
postdata['isView']=is_view
resp=portal.con.post(path,postdata)
ifrespandresp.get('success'):
returntarget.content.get(resp['itemId'])
returnNone
def_add_to_definition(feature_layer_manager,definition):
"""Createanewservice.
Keywordarguments:
feature_layer_manager-TheinstanceofFeatureLayerManageroftheservicetoedit
definition-Thedefinitionasastringtoaddtotheservice"""
params={
"f":"json",
"addToDefinition":definition,
}
u_url=feature_layer_manager._url+"/addToDefinition"
res=feature_layer_manager._con.post(u_url,params)
feature_layer_manager.refresh()
returnres
def_get_related_items(item,rel_type,direction="forward"):
"""Gettherelateditemsforagivenitem.
Keywordarguments:
item-Theitemtogettherelatesfor
rel_type-Therelationshiptype
direction-Thedirectionoftherelationship"""
source=item._gis
related_items=[]
postdata={'f':'json'}
postdata['relationshipType']=rel_type
postdata['direction']=direction
resp=source._portal.con.post('content/items/'+item.id+'/relatedItems',postdata)
forrelated_iteminresp['relatedItems']:
related_items.append(source.content.get(related_item['id']))
returnrelated_items
def_add_relationship(origin_item,destination_item,rel_type):
"""Addarelationshiptoanitem.
Keywordarguments:
origin_item-Theoriginitem
destination_item-Thedestinationitem
rel_type-Therelationshiptype"""
postdata={'f':'json'}
postdata['originItemId']=origin_item.id
postdata['destinationItemId']=destination_item.id
postdata['relationshipType']=rel_type
path='content/users/'+origin_item.owner
path+='/addRelationship'
origin_item._gis._portal.con.post(path,postdata)
def_get_version_management_server(target,feature_service):
"""Getstheurloftheportal/org
Keywordarguments:
target-Theportal/orgtogettheurlfor.
feature_service-Theurltothefeature_serviceintheportaltoretrievetheVersionManagerinfo."""
postdata={'f':'json'}
path=os.path.dirname(feature_service)
path+='/VersionManagementServer'
returntarget._portal.con.post(path,postdata)
def_get_org_url(target):
"""Getstheurloftheportal/org
Keywordarguments:
target-Theportal/orgtogettheurlfor."""
org_url=target._portal.url
properties=target.properties
scheme='http'
if'allSSL'inpropertiesandproperties['allSSL']:
scheme='https'
if'urlKey'inpropertiesand'customBaseUrl'inproperties:
org_url="{0}://{1}.{2}/".format(scheme,properties['urlKey'],properties['customBaseUrl'])
else:
url=urlparse(org_url)
org_url=org_url.replace(url.scheme,scheme)
returnorg_url
def _compare_url(url1, url2):
    """Compare two URLs ignoring scheme
    Keyword arguments:
    url1 - The first url
    url2 - The second url"""
    url_parse1 = urlparse(url1)
    url_parse2 = urlparse(url2)
    return "{0}{1}".format(url_parse1.netloc.lower(), url_parse1.path.lower()) == "{0}{1}".format(url_parse2.netloc.lower(), url_parse2.path.lower())
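
# Example: _compare_url() above treats these two URLs as the same service because only the
# scheme differs; the netloc and path are compared case-insensitively.
def _example_compare_urls():
    url1 = "https://services.arcgis.com/ABC123/arcgis/rest/services/Parcels/FeatureServer"
    url2 = "http://services.arcgis.com/abc123/arcgis/rest/services/parcels/FeatureServer"
    return _compare_url(url1, url2)  # True
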
def _sort_item_types(item_definition):
    """Sort the item definitions in the order that items need to be cloned.
    Keyword arguments:
    item_definition - The item definition to determine its sort order"""
    order = 0
    if isinstance(item_definition, _FormDefinition):
        order = 0
    elif isinstance(item_definition, _FeatureServiceDefinition) and not item_definition.is_view:
        order = 1
    elif isinstance(item_definition, _FeatureServiceDefinition) and item_definition.is_view:
        order = 2
    elif isinstance(item_definition, _FeatureCollectionDefinition):
        order = 3
    elif isinstance(item_definition, _WebMapDefinition):
        order = 4
    elif isinstance(item_definition, _ApplicationDefinition):
        order = 5
    elif isinstance(item_definition, _WorkforceProjectDefinition):
        order = 6
    else:
        order = 7
    return order
def _check_cancel_status(item=None):
    """If the script is running as a GP tool check if it has been canceled.
    Keyword arguments:
    item - The new item that has been partially cloned"""
    try:
        import arcpy
        if arcpy.env.isCancelled:
            raise _CustomCancelException(item)
    except ImportError:
        pass
def _find_and_replace_fields(text, field_mapping):
    """Perform a find and replace for field names in a json definition.
    Keyword arguments:
    text - The json to search and replace field names
    field_mapping - A dictionary containing the pairs of original field names and new field names"""
    for field in field_mapping:
        replace = field_mapping[field]
        results = set(re.findall('([{{("\[])({0})([}})"\]])'.format(field), text))
        start = re.findall('(^{0})([}})"\]])'.format(field), text)
        end = re.findall('([{{("\[])({0}$)'.format(field), text)
        for element in results:
            text = text.replace(''.join(element), ''.join([element[0], replace, element[2]]))
        if len(start) > 0:
            new_start = ''.join([replace, start[0][1]])
            text = new_start + text[len(new_start):]
        if len(end) > 0:
            new_end = ''.join([end[0][0], replace])
            text = text[:len(text) - len(new_end) + 1] + new_end
    return text
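
# Example: a small sketch of _find_and_replace_fields() above; a field name is only replaced
# where it is delimited the way it appears in layer and popup json (wrapped in quotes,
# braces, brackets or parentheses).
def _example_rename_fields_in_json():
    text = '{"field": "STATUS", "labelExpression": "[STATUS]", "value": "{STATUS}"}'
    return _find_and_replace_fields(text, {'STATUS': 'STATUS_1'})
    # -> '{"field": "STATUS_1", "labelExpression": "[STATUS_1]", "value": "{STATUS_1}"}'
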
def _find_and_replace_fields_arcade(text, field_mapping):
    """Perform a find and replace for field names in an arcade expression.
    Keyword arguments:
    text - The arcade expression to search and replace field names
    field_mapping - A dictionary containing the pairs of original field names and new field names"""
    for field in field_mapping:
        replace = field_mapping[field]
        text = text.replace('$feature.{0}'.format(field), '$feature.{0}'.format(replace))
        text = text.replace('$feature["{0}"]'.format(field), '$feature["{0}"]'.format(replace))
    return text
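
# Example: a sketch of _find_and_replace_fields_arcade() above; only the $feature.FIELD and
# $feature["FIELD"] access patterns are rewritten.
def _example_rename_fields_in_arcade():
    expression = 'Round($feature.POP2020 / AreaGeodetic($feature, "square-kilometers"), 1)'
    return _find_and_replace_fields_arcade(expression, {'POP2020': 'POP_2020'})
    # -> 'Round($feature.POP_2020 / AreaGeodetic($feature, "square-kilometers"), 1)'
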
def _update_feature_attributes(feature, field_mapping):
    """Perform a find and replace for field names in a feature attribute definition.
    Keyword arguments:
    feature - The feature to search and replace field names
    field_mapping - A dictionary containing the pairs of original field names and new field names"""
    if 'attributes' in feature and feature['attributes'] is not None:
        for attribute in [att for att in feature['attributes']]:
            if attribute in field_mapping:
                if field_mapping[attribute] in feature['attributes']:
                    continue
                feature['attributes'][field_mapping[attribute]] = feature['attributes'][attribute]
                del feature['attributes'][attribute]
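
# Example: a sketch of _update_feature_attributes() above; the feature dictionary is modified
# in place, renaming attribute keys according to the field mapping.
def _example_rename_feature_attributes():
    feature = {'attributes': {'OBJECTID': 1, 'STATUS': 'Open'}, 'geometry': {'x': 0, 'y': 0}}
    _update_feature_attributes(feature, {'STATUS': 'STATUS_1'})
    return feature  # {'attributes': {'OBJECTID': 1, 'STATUS_1': 'Open'}, 'geometry': {...}}
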
def_update_layer_fields(layer,field_mapping):
"""Performafindandreplaceforfieldnamesinalayerdefinition.
Keywordarguments:
layer-Thelayertosearchandreplacefieldsnames
field_mapping-Adictionarycontainingthepairsoforiginalfieldnamesandnewfieldnames"""
if'layerDefinition'inlayerandlayer['layerDefinition']isnotNone:
layer_definition=layer['layerDefinition']
if'definitionExpression'inlayer_definitionandlayer_definition['definitionExpression']isnotNone:
layer_definition['definitionExpression']=_find_and_replace_fields(layer_definition['definitionExpression'],field_mapping)
if'drawingInfo'inlayer_definitionandlayer_definition['drawingInfo']isnotNone:
if'renderer'inlayer_definition['drawingInfo']andlayer_definition['drawingInfo']['renderer']isnotNone:
renderer=layer_definition['drawingInfo']['renderer']
ifrenderer['type']=='uniqueValue':
i=0
while'field{0}'.format(i)inrenderer:
ifrenderer['field{0}'.format(i)]infield_mapping:
renderer['field{0}'.format(i)]=field_mapping[renderer['field{0}'.format(i)]]
i+=1
elifrenderer['type']=='classBreaks':
if'field'inrenderer:
ifrenderer['field']infield_mapping:
renderer['field']=field_mapping[renderer['field']]
value_expression=_deep_get(renderer,"valueExpression")
ifvalue_expressionisnotNone:
renderer['valueExpression']=_find_and_replace_fields_arcade(str(value_expression),field_mapping)
labeling_infos=_deep_get(layer_definition['drawingInfo'],'labelingInfo')
iflabeling_infosisnotNone:
forlabel_infoinlabeling_infos:
label_expression=_deep_get(label_info,'labelExpression')
iflabel_expressionisnotNone:
results=re.findall("\[(.*?)\]",label_expression)
forresultinresults:
ifresultinfield_mapping:
label_info['labelExpression']=str(label_expression).replace("[{0}]".format(result),"[{0}]".format(field_mapping[result]))
value=_deep_get(label_info,'labelExpressionInfo','value')
ifvalueisnotNone:
results=re.findall("{(.*?)}",value)
forresultinresults:
ifresultinfield_mapping:
label_info['labelExpressionInfo']['value']=str(value).replace("{{{0}}}".format(result),"{{{0}}}".format(field_mapping[result]))
expression=_deep_get(label_info,'labelExpressionInfo','expression')
ifexpressionisnotNone:
label_info['labelExpressionInfo']['expression']=_find_and_replace_fields_arcade(str(expression),field_mapping)
if'popupInfo'inlayerandlayer['popupInfo']isnotNone:
if'title'inlayer['popupInfo']andlayer['popupInfo']['title']isnotNone:
results=re.findall("{(.*?)}",layer['popupInfo']['title'])
forresultinresults:
ifresultinfield_mapping:
layer['popupInfo']['title']=str(layer['popupInfo']['title']).replace("{{{0}}}".format(result),"{{{0}}}".format(field_mapping[result]))
if'description'inlayer['popupInfo']andlayer['popupInfo']['description']isnotNone:
results=re.findall("{(.*?)}",layer['popupInfo']['description'])
forresultinresults:
ifresultinfield_mapping:
layer['popupInfo']['description']=str(layer['popupInfo']['description']).replace("{{{0}}}".format(result),"{{{0}}}".format(field_mapping[result]))
if'fieldInfos'inlayer['popupInfo']andlayer['popupInfo']['fieldInfos']isnotNone:
forfieldinlayer['popupInfo']['fieldInfos']:
iffield['fieldName']infield_mapping:
field['fieldName']=field_mapping[field['fieldName']]
if'expressionInfos'inlayer['popupInfo']andlayer['popupInfo']['expressionInfos']isnotNone:
forexpression_infoinlayer['popupInfo']['expressionInfos']:
if'expression'inexpression_infoandexpression_info['expression']isnotNone:
expression_info['expression']=_find_and_replace_fields_arcade(str(expression_info['expression']),field_mapping)
if'mediaInfos'inlayer['popupInfo']andlayer['popupInfo']['mediaInfos']isnotNone:
formedia_infoinlayer['popupInfo']['mediaInfos']:
if'title'inmedia_infoandmedia_info['title']isnotNone:
results=re.findall("{(.*?)}",media_info['title'])
forresultinresults:
ifresultinfield_mapping:
media_info['title']=str(media_info['title']).replace("{{{0}}}".format(result),"{{{0}}}".format(field_mapping[result]))
if'caption'inmedia_infoandmedia_info['caption']isnotNone:
results=re.findall("{(.*?)}",media_info['caption'])
forresultinresults:
ifresultinfield_mapping:
media_info['caption']=str(media_info['caption']).replace("{{{0}}}".format(result),"{{{0}}}".format(field_mapping[result]))
if'normalizeField'inmedia_infoandmedia_info['normalizeField']isnotNone:
ifmedia_info['normalizeField']infield_mapping:
media_info['normalizeField']=field_mapping[media_info['normalizeField']]
if'fields'inmedia_infoandmedia_info['fields']isnotNone:
forfieldinmedia_info['fields']:
fields=[]
iffieldinfield_mapping:
fields.append(field_mapping[field])
else:
fields.append(field)
media_info['fields']=fields
if'definitionEditor'inlayerandlayer['definitionEditor']isnotNone:
if'inputs'inlayer['definitionEditor']andlayer['definitionEditor']['inputs']isnotNone:
fordefinition_inputinlayer['definitionEditor']['inputs']:
if'parameters'indefinition_inputanddefinition_input['parameters']isnotNone:
forparamindefinition_input['parameters']:
if'fieldName'inparamandparam['fieldName']isnotNone:
ifparam['fieldName']infield_mapping:
param['fieldName']=field_mapping[param['fieldName']]
if'parameterizedExpression'inlayer['definitionEditor']andlayer['definitionEditor']['parameterizedExpression']isnotNone:
layer['definitionEditor']['parameterizedExpression']=_find_and_replace_fields(layer['definitionEditor']['parameterizedExpression'],field_mapping)
def_update_layer_related_fields(layer,relationship_field_mapping):
"""Performafindandreplaceforfieldnamesinalayerdefinition.
Keywordarguments:
layer-Thelayertosearchandreplacefieldsnames
field_mapping-Adictionarycontainingthepairsoforiginalfieldnamesandnewfieldnames"""
forid,field_mappinginrelationship_field_mapping.items():
field_prefix="relationships/{0}/".format(id)
if'popupInfo'inlayerandlayer['popupInfo']isnotNone:
if'title'inlayer['popupInfo']andlayer['popupInfo']['title']isnotNone:
results=re.findall("{{{0}(.*?)}}".format(field_prefix),layer['popupInfo']['title'])
forresultinresults:
ifresultinfield_mapping:
layer['popupInfo']['title']=str(layer['popupInfo']['title']).replace("{{{0}{1}}}".format(field_prefix,result),"{{{0}{1}}}".format(field_prefix,field_mapping[result]))
if'description'inlayer['popupInfo']andlayer['popupInfo']['description']isnotNone:
results=re.findall("{{{0}(.*?)}}".format(field_prefix),layer['popupInfo']['description'])
forresultinresults:
ifresultinfield_mapping:
layer['popupInfo']['description']=str(layer['popupInfo']['description']).replace("{{{0}{1}}}".format(field_prefix,result),"{{{0}{1}}}".format(field_prefix,field_mapping[result]))
if'fieldInfos'inlayer['popupInfo']andlayer['popupInfo']['fieldInfos']isnotNone:
forfieldinlayer['popupInfo']['fieldInfos']:
iffield['fieldName'].startswith(field_prefix)andfield['fieldName'][len(field_prefix):]infield_mapping:
field['fieldName']="{0}{1}".format(field_prefix,field_mapping[field['fieldName'][len(field_prefix):]])
if'mediaInfos'inlayer['popupInfo']andlayer['popupInfo']['mediaInfos']isnotNone:
formedia_infoinlayer['popupInfo']['mediaInfos']:
if'title'inmedia_infoandmedia_info['title']isnotNone:
results=re.findall("{{{0}(.*?)}}".format(field_prefix),media_info['title'])
forresultinresults:
ifresultinfield_mapping:
media_info['title']=str(media_info['title']).replace("{{{0}{1}}}".format(field_prefix,result),"{{{0}{1}}}".format(field_prefix,field_mapping[result]))
if'caption'inmedia_infoandmedia_info['caption']isnotNone:
results=re.findall("{{{0}(.*?)}}".format(field_prefix),media_info['caption'])
forresultinresults:
ifresultinfield_mapping:
media_info['caption']=str(media_info['caption']).replace("{{{0}{1}}}".format(field_prefix,result),"{{{0}{1}}}".format(field_prefix,field_mapping[result]))
if'normalizeField'inmedia_infoandmedia_info['normalizeField']isnotNone:
ifmedia_info['normalizeField'].startswith(field_prefix)andmedia_info['normalizeField']infield_mapping:
media_info['normalizeField']="{0}{1}".format(field_prefix,field_mapping[media_info['normalizeField'][len(field_prefix):]])
if'fields'inmedia_infoandmedia_info['fields']isnotNone:
forfieldinmedia_info['fields']:
fields=[]
iffield.startswith(field_prefix)andfield[len(field_prefix):]infield_mapping:
fields.append("{0}{1}".format(field_prefix,field_mapping[field[len(field_prefix):]]))
else:
fields.append(field)
media_info['fields']=fields
def _zip_dir(path, zip_file, include_root=True):
    """Zip a directory of files.
    Keyword arguments:
    path - The folder containing the files and subfolders to zip
    zip_file - The zip file that will store the compressed files
    include_root - Indicates if the root folder should be included in the zip"""
    rel_path = ''
    if include_root:
        rel_path = '..'
    # Zip a directory of files
    for root, dirs, files in os.walk(path):
        for file in files:
            zip_file.write(os.path.join(root, file), os.path.relpath(os.path.join(root, file), os.path.join(path, rel_path)))
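
# Example: a sketch of _zip_dir() above with a hypothetical folder path; the caller creates the
# zipfile.ZipFile and the helper walks the folder and writes each file into it.
def _example_zip_folder():
    import zipfile  # standard library
    with zipfile.ZipFile('/tmp/project.zip', 'w', zipfile.ZIP_DEFLATED) as zip_file:
        _zip_dir('/tmp/project', zip_file, include_root=False)
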
def _deep_get(dictionary, *keys):
    """Safely return a nested value from a dictionary. If at any point along the path the key doesn't exist the function will return None.
    Keyword arguments:
    dictionary - The dictionary to search for the value
    *keys - The keys used to fetch the desired value"""
    return reduce(lambda d, key: d.get(key) if d else None, keys, dictionary)
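
# Example: a sketch of _deep_get() above; a missing key at any depth yields None instead of
# raising a KeyError.
def _example_deep_get():
    layer = {'editFieldsInfo': {'creatorField': 'created_user'}}
    creator = _deep_get(layer, 'editFieldsInfo', 'creatorField')       # 'created_user'
    missing = _deep_get(layer, 'editFieldsInfo', 'missing', 'nested')  # None
    return creator, missing
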
#endregion
_COPY_ONLY_TAG = 'copy-only'
_TARGET_MUST_EXIST_TAG = 'target-must-exist'
_MAINTAIN_SPATIAL_REF = 'maintain-spatial-ref'
_TEMP_DIR = None
_TEXT_BASED_ITEM_TYPES = ['Web Map', 'Feature Service', 'Map Service', 'Operation View',
                          'Image Service', 'Feature Collection', 'Feature Collection Template',
                          'Web Mapping Application', 'Mobile Application', 'Symbol Set', 'Color Set']
_GPS_METADATA_FIELDS="""{
"fields":[{
"name":"ESRIGNSS_RECEIVER",
"type":"esriFieldTypeString",
"alias":"ReceiverName",
"sqlType":"sqlTypeOther",
"length":50,
"nullable":true,
"editable":true,
"domain":null,
"defaultValue":null
},{
"name":"ESRIGNSS_H_RMS",
"type":"esriFieldTypeDouble",
"alias":"HorizontalAccuracy(m)",
"sqlType":"sqlTypeOther",
"nullable":true,
"editable":true,
"domain":null,
"defaultValue":null
},{
"name":"ESRIGNSS_V_RMS",
"type":"esriFieldTypeDouble",
"alias":"VerticalAccuracy(m)",
"sqlType":"sqlTypeOther",
"nullable":true,
"editable":true,
"domain":null,
"defaultValue":null
},{
"name":"ESRIGNSS_LATITUDE",
"type":"esriFieldTypeDouble",
"alias":"Latitude",
"sqlType":"sqlTypeOther",
"nullable":true,
"editable":true,
"domain":null,
"defaultValue":null
},{
"name":"ESRIGNSS_LONGITUDE",
"type":"esriFieldTypeDouble",
"alias":"Longitude",
"sqlType":"sqlTypeOther",
"nullable":true,
"editable":true,
"domain":null,
"defaultValue":null
},{
"name":"ESRIGNSS_ALTITUDE",
"type":"esriFieldTypeDouble",
"alias":"Altitude",
"sqlType":"sqlTypeOther",
"nullable":true,
"editable":true,
"domain":null,
"defaultValue":null
},{
"name":"ESRIGNSS_PDOP",
"type":"esriFieldTypeDouble",
"alias":"PDOP",
"sqlType":"sqlTypeOther",
"nullable":true,
"editable":true,
"domain":null,
"defaultValue":null
},{
"name":"ESRIGNSS_HDOP",
"type":"esriFieldTypeDouble",
"alias":"HDOP",
"sqlType":"sqlTypeOther",
"nullable":true,
"editable":true,
"domain":null,
"defaultValue":null
},{
"name":"ESRIGNSS_VDOP",
"type":"esriFieldTypeDouble",
"alias":"VDOP",
"sqlType":"sqlTypeOther",
"nullable":true,
"editable":true,
"domain":null,
"defaultValue":null
},{
"name":"ESRIGNSS_FIXTYPE",
"type":"esriFieldTypeSmallInteger",
"alias":"FixType",
"sqlType":"sqlTypeOther",
"nullable":true,
"editable":true,
"domain":{
"type":"codedValue",
"name":"ESRI_FIX_TYPE_DOMAIN",
"codedValues":[{
"name":"Fixnotvalid",
"code":0
},{
"name":"GPS",
"code":1
},{
"name":"DifferentialGPS",
"code":2
},{
"name":"RTKFixed",
"code":4
},{
"name":"RTKFloat",
"code":5
}
]
},
"defaultValue":null
},{
"name":"ESRIGNSS_CORRECTIONAGE",
"type":"esriFieldTypeDouble",
"alias":"CorrectionAge",
"sqlType":"sqlTypeOther",
"nullable":true,
"editable":true,
"domain":null,
"defaultValue":null
},{
"name":"ESRIGNSS_STATIONID",
"type":"esriFieldTypeSmallInteger",
"alias":"StationID",
"sqlType":"sqlTypeOther",
"nullable":true,
"editable":true,
"domain":{
"type":"range",
"name":"ESRI_STATION_ID_DOMAIN",
"range":[
0,
1023
]
},
"defaultValue":null
},{
"name":"ESRIGNSS_NUMSATS",
"type":"esriFieldTypeSmallInteger",
"alias":"NumberofSatellites",
"sqlType":"sqlTypeOther",
"nullable":true,
"editable":true,
"domain":{
"type":"range",
"name":"ESRI_NUM_SATS_DOMAIN",
"range":[
0,
99
]
},
"defaultValue":null
},{
"name":"ESRIGNSS_FIXDATETIME",
"type":"esriFieldTypeDate",
"alias":"FixTime",
"sqlType":"sqlTypeOther",
"length":0,
"nullable":true,
"editable":true,
"domain":null,
"defaultValue":null
},{
"name":"ESRIGNSS_AVG_H_RMS",
"type":"esriFieldTypeDouble",
"alias":"AverageHorizontalAccuracy(m)",
"sqlType":"sqlTypeOther",
"nullable":true,
"editable":true,
"domain":null,
"defaultValue":null
},{
"name":"ESRIGNSS_AVG_V_RMS",
"type":"esriFieldTypeDouble",
"alias":"AverageVerticalAccuracy(m)",
"sqlType":"sqlTypeOther",
"nullable":true,
"editable":true,
"domain":null,
"defaultValue":null
},{
"name":"ESRIGNSS_AVG_POSITIONS",
"type":"esriFieldTypeSmallInteger",
"alias":"AveragedPositions",
"sqlType":"sqlTypeOther",
"nullable":true,
"editable":true,
"domain":null,
"defaultValue":null
},{
"name":"ESRIGNSS_H_STDDEV",
"type":"esriFieldTypeDouble",
"alias":"StandardDeviation(m)",
"sqlType":"sqlTypeOther",
"nullable":true,
"editable":true,
"domain":null,
"defaultValue":null
}
],
"popup":[{
"fieldName":"ESRIGNSS_RECEIVER",
"label":"ReceiverName",
"isEditable":false,
"isEditableOnLayer":true,
"visible":false,
"stringFieldOption":"textbox"
},{
"fieldName":"ESRIGNSS_H_RMS",
"label":"HorizontalAccuracy(m)",
"isEditable":false,
"isEditableOnLayer":true,
"visible":false,
"format":{
"places":2,
"digitSeparator":true
}
},{
"fieldName":"ESRIGNSS_V_RMS",
"label":"VerticalAccuracy(m)",
"isEditable":false,
"isEditableOnLayer":true,
"visible":false,
"format":{
"places":2,
"digitSeparator":true
}
},{
"fieldName":"ESRIGNSS_LATITUDE",
"label":"Latitude",
"isEditable":false,
"isEditableOnLayer":true,
"visible":false,
"format":{
"places":8,
"digitSeparator":false
}
},{
"fieldName":"ESRIGNSS_LONGITUDE",
"label":"Longitude",
"isEditable":false,
"isEditableOnLayer":true,
"visible":false,
"format":{
"places":8,
"digitSeparator":false
}
},{
"fieldName":"ESRIGNSS_ALTITUDE",
"label":"Altitude",
"isEditable":false,
"isEditableOnLayer":true,
"visible":false,
"format":{
"places":2,
"digitSeparator":true
}
},{
"fieldName":"ESRIGNSS_PDOP",
"label":"PDOP",
"isEditable":false,
"isEditableOnLayer":true,
"visible":false,
"format":{
"places":2,
"digitSeparator":true
}
},{
"fieldName":"ESRIGNSS_HDOP",
"label":"HDOP",
"isEditable":false,
"isEditableOnLayer":true,
"visible":false,
"format":{
"places":2,
"digitSeparator":true
}
},{
"fieldName":"ESRIGNSS_VDOP",
"label":"VDOP",
"isEditable":false,
"isEditableOnLayer":true,
"visible":false,
"format":{
"places":2,
"digitSeparator":true
}
},{
"fieldName":"ESRIGNSS_FIXTYPE",
"label":"FixType",
"isEditable":false,
"isEditableOnLayer":true,
"visible":false,
"stringFieldOption":"textbox"
},{
"fieldName":"ESRIGNSS_CORRECTIONAGE",
"label":"CorrectionAge",
"isEditable":false,
"isEditableOnLayer":true,
"visible":false,
"format":{
"places":0,
"digitSeparator":true
}
},{
"fieldName":"ESRIGNSS_STATIONID",
"label":"StationID",
"isEditable":false,
"isEditableOnLayer":true,
"visible":false,
"format":{
"places":0,
"digitSeparator":false
}
},{
"fieldName":"ESRIGNSS_NUMSATS",
"label":"NumberofSatellites",
"isEditable":false,
"isEditableOnLayer":true,
"visible":false,
"format":{
"places":0,
"digitSeparator":true
}
},{
"fieldName":"ESRIGNSS_FIXDATETIME",
"label":"FixTime",
"isEditable":false,
"isEditableOnLayer":true,
"visible":false,
"format":{
"dateFormat":"shortDateShortTime",
"timezone":"utc"
}
},{
"fieldName":"ESRIGNSS_AVG_H_RMS",
"label":"AverageHorizontalAccuracy(m)",
"isEditable":false,
"isEditableOnLayer":true,
"visible":false,
"format":{
"places":2,
"digitSeparator":true
}
},{
"fieldName":"ESRIGNSS_AVG_V_RMS",
"label":"AverageVerticalAccuracy(m)",
"isEditable":false,
"isEditableOnLayer":true,
"visible":false,
"format":{
"places":2,
"digitSeparator":true
}
},{
"fieldName":"ESRIGNSS_AVG_POSITIONS",
"label":"AveragedPositions",
"isEditable":false,
"isEditableOnLayer":true,
"visible":false,
"format":{
"places":0,
"digitSeparator":true
}
},{
"fieldName":"ESRIGNSS_H_STDDEV",
"label":"StandardDeviation(m)",
"isEditable":false,
"isEditableOnLayer":true,
"visible":false,
"format":{
"places":3,
"digitSeparator":true
}
}
]
}"""
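
# Example: a sketch of how the _GPS_METADATA_FIELDS payload above could be added to a hosted
# feature layer via the addToDefinition operation posted by _add_to_definition() earlier in this
# script. The feature_layer argument is an assumption of this sketch: an arcgis.features.FeatureLayer
# you administer, whose .manager property exposes the admin endpoint.
def _example_add_gps_fields(feature_layer):
    gps_metadata = json.loads(_GPS_METADATA_FIELDS)
    new_fields = {'fields': gps_metadata['fields']}
    return _add_to_definition(feature_layer.manager, json.dumps(new_fields))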