Tuesday, July 26, 2016

Geog 491: Exercise 7- Risk Model for Landslide Susceptibility in Oregon

Goal and Background

The main purpose of this exercise is to generate a landslide risk model for northwestern Oregon.  The data used were prepared in previous exercises by projecting and clipping the slope, land cover, precipitation, and hillshade layers.  The objectives are stated below:


  • Set up a script, import modules, set environments and check out extensions.
  • Set up smart variables and create a list.
  • Create a fishnet to be used as the unit of analysis in combination with the roadway buffer. 
  • Buffer the roads.
  • Intersect the road buffer and the fishnet to create the unit of analysis feature class.
  • Create variables for the reclass values for slope and land cover.
  • Apply the reclass values using the reclassify function. 
  • Multiply the rasters together to create the risk raster. 
  • Use zonal statistics as a table to tally the risk values for the unit of analysis. 
  • Join the results back to the unit of analysis. 
  • Create a map of the results.
Methods

Variables were created first to hold the input datasets and output names, and field names were set up front to keep the script organized.  A processing boundary was extracted from the footprint of the slope raster so that processing was limited to the study area.  The Buffer and Intersect geoprocessing tools were then used to build the unit-of-analysis feature class, the slope and land cover rasters were reclassified into risk classes and multiplied together to create the risk raster, and Zonal Statistics as Table tallied the risk values, which were joined back to the unit of analysis.
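Because the risk raster is simply the cell-by-cell product of the two reclassified rasters, a cell in slope class 5 and land cover class 4, for example, receives a risk value of 20.  A minimal sketch of that multiplication using arcpy.sa map algebra is shown here (the raster names are placeholders, and the full script below uses Times_3d instead):

import arcpy
from arcpy.sa import Raster

arcpy.CheckOutExtension("spatial")

#placeholder names for the two reclassified rasters created in the script below
slopeReclass = Raster("NW_Oregon_Slope_30m_Reclass")
lulcReclass = Raster("NLCD2011_Oregon_NW_Reclass")

#cell-by-cell product, e.g. slope class 5 * land cover class 4 = risk 20
riskRaster = slopeReclass * lulcReclass
riskRaster.save("NW_Oregon_LandslideRisk_sketch")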

Results

The map and script for this exercise are pictured below:


#-------------------------------------------------------------------

# Name:        Exercise 7
# Purpose:     Generate a risk model for landslides in Oregon
#
# Author:      Laura Hartley, hartlelj
#
# Date:        7/26/2016
#-------------------------------------------------------------------
#Objective One
#Ensure the program is working
print "Script initialized"

#import system modules
import arcpy
from arcpy import env
import os
import shutil
import time
import datetime
from arcpy.sa import *
from arcpy.ddd import *

#Allow the script to overwrite existing files
arcpy.env.overwriteOutput = True
arcpy.env.workspace = "C:\Users\lhartley13\Documents\ArcGIS\EX6\Ex6.gdb"
arcpy.env.outputCoordinateSystem = "C:\Users\lhartley13\Documents\ArcGIS\EX5\Ex5\Ex5.gdb\OregonBoundary"
#NAD_1983_HARN_Oregon_Statewide_Lambert
arcpy.CheckOutExtension('3D')
arcpy.CheckOutExtension('spatial')

#Objective Two
#Create Variables
geodatabasePath = "C:\Users\lhartley13\Documents\ArcGIS\EX6\Ex6.gdb"
featureDatasetPath = "C:\Users\lhartley13\Documents\ArcGIS\EX6\Ex6.gdb\OregonFCs"

majorRoads = "C:\Users\lhartley13\Documents\ArcGIS\EX6\Ex6.gdb\OregonFCs\MajorRoads_NW"
landslidePts = "C:\Users\lhartley13\Documents\ArcGIS\EX6\Ex6.gdb\OregonFCs\LandslidePoints_NW"
precipData = "C:\Users\lhartley13\Documents\ArcGIS\EX6\Ex6.gdb\NW_Oregon_PrecipitationData_800m"
slopeData = "C:\Users\lhartley13\Documents\ArcGIS\EX5\Ex5\Ex5.gdb\NW_Oregon_Slope_30m"
landUseData = "C:\Users\lhartley13\Documents\ArcGIS\EX6\Ex6.gdb\NLCD2011_Oregon_NW"

boundaryName = "ProcessingBoundary"
fishnetName = "Oregon_NW_1kmFishnet"
roadsBufferName = "MajorRoads_NW_Buffered"
roadsBuffIntName = "MajorRoads_NW_Buffer_1kmGrid"
riskRasterName = "NW_Oregon_LandslideRisk"
#Derive the feature class name without its path or extension
pointName = os.path.splitext(os.path.basename(landslidePts))[0]

processingBoundary = os.path.join(featureDatasetPath,boundaryName)
outputFishnet = os.path.join(featureDatasetPath,fishnetName)
roadBufferOutput = os.path.join(featureDatasetPath,roadsBufferName)
roadBuffGridOutput = os.path.join(featureDatasetPath,roadsBuffIntName)
slopeReclassRaster = slopeData + "_Reclass"
lulcReclassRaster = landUseData + "_Reclass"

landslideRiskRaster = os.path.join(geodatabasePath,riskRasterName)
landslideRiskStatistics = landslideRiskRaster + "_Stats"

#Set up field names
bufferedRoadsUniqueIDField = "FID_Oregon_NW_1kmFishnet"

#Objective Three
#Create processing boundary by extracting the footprint from the slope raster
print "Creating Processing boundary!" ,datetime.datetime.now().strftime("%H:%M:%S")
arcpy.RasterDomain_3d(slopeData,processingBoundary,"POLYGON")

#Create fishnet (Grid) with 1km spacing
#Variables for fishnet
desc = arcpy.Describe(processingBoundary)
gridSize = 1000.0

print  "Creating Fishnet!" ,datetime.datetime.now().strftime("%H:%M:%S")
arcpy.CreateFishnet_management(outputFishnet,str(desc.extent.lowerLeft),str(desc.extent.XMin)+" "+str(desc.extent.YMax+10),gridSize,gridSize,"0","0",str(desc.extent.upperRight),"NO_LABELS",processingBoundary,"POLYGON")

#Objective Four
#Process- Buffer
arcpy.Buffer_analysis(majorRoads,roadBufferOutput,"100 Meters","FULL","ROUND","ALL","","PLANAR")

#Objective Five
#Process- Intersect
print "Intersecting!" ,datetime.datetime.now().strftime("%H:%M:%S")
arcpy.Intersect_analysis([roadBufferOutput,outputFishnet],roadBuffGridOutput,"ALL","","INPUT")

#Objective Six
#This string specifies different value ranges and the values these ranges will be assigned.
#The range 0-1.484953 will be assigned a value of 1.
remapStringSlope = "0 1.484953 1;1.484954 6.184316 2;6.184317 10.883696 3;10.883697 15.583077 4;15.583078 29.681219 5;29.681220 34.380600 4;34.380601 39.079981 3;39.079982 43.779362 2;43.779363 90 1"

#This string specifies risk values to the different nlcd classes based on their values.
#Open water has a raster value '11' and is 1 on the risk scale. 'Developed, open space' has a raster values of '21' and a risk value of 4.
remapStringNLCD = "11 1;21 4;22 4;23 3;24 1;31 2;41 4;42 5;43 5;52 5;71 4;81 3;82 3;90 3;95 1"
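#A hedged aside (not used by the rest of this script): remap strings like the
#ones above can also be assembled from (min, max, newValue) tuples, which is
#easier to audit than a single long literal.  The example below is hypothetical
#and only rebuilds the first two slope classes.
exampleSlopeBreaks = [(0, 1.484953, 1), (1.484954, 6.184316, 2)]
exampleRemapString = ";".join("%s %s %s" % b for b in exampleSlopeBreaks)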

#Objective Seven
print "Reclassifying slope into individual run type classes" ,datetime.datetime.now().strftime("%H:%M:%S")
arcpy.Reclassify_3d(slopeData,"VALUE",remapStringSlope,slopeReclassRaster,"NODATA")

print "Reclassifying landuse into individual run type classes" ,datetime.datetime.now().strftime("%H:%M:%S")
arcpy.Reclassify_3d(landUseData,"VALUE",remapStringNLCD,lulcReclassRaster,"NODATA")

#Objective Eight
print "Multiplying the reclasses rasters to obtain a risk value"
arcpy.Times_3d(slopeReclassRaster,lulcReclassRaster,landslideRiskRaster)

#Objective Nine
#Process- Zonal statistics as table
arcpy.gp.ZonalStatisticsAsTable_sa(roadBuffGridOutput,bufferedRoadsUniqueIDField,landslideRiskRaster,landslideRiskStatistics,"DATA","MEDIAN")

#Objective Ten
#Process- Join Field
arcpy.JoinField_management(roadBuffGridOutput,bufferedRoadsUniqueIDField,landslideRiskStatistics,bufferedRoadsUniqueIDField,"MEDIAN")

print "script is complete" ,datetime.datetime.now().strftime("%H:%M:%S")


Sources

http://spatialdata.oregonexplorer.info/geoportal/catalog/main/home.page
http://nationalmap.gov/

Geog 491: Exercise 6- Analyzing Raster Data for Landslide Susceptibility in Oregon

Goal and Background

The main purpose of this exercise is to use a number of raster and vector tools to identify common characteristics of landslides in Oregon.  The objectives are stated below:


  • Set up a script, import modules, set environments and check out extensions.
  • Set up smart variables and create a list.
  • Use the select tool to query landslides that meet a set of criteria.
  • Add the raster values from precipitation, slope and land use to the point feature class of landslides.
  • Add and calculate a field to determine the radius of a buffer based on the slide characteristics.
  • Buffer the landslides with the calculated values.
  • Use zonal statistics to calculate the median slope of the buffered landslides and join the results of the table back to the buffered landslides.
  • Create and use a search and update cursor to update any values that are null.
  • Calculate summary statistics for the raster values of precipitation, slope and slide area based on movement class, land cover type as well as the combination of both movement class and land cover type.
  • Use the tabulate area tool to calculate how much of each buffer falls within different analysis classes.  Export the table for additional analysis in MS Excel.
  • Use a loop to delete any of the intermediate features classes.
Methods

After creating the basic start of the script (importing system modules, allowing the script to overwrite existing files, etc.), naming conventions were set up for the output files.  Lists were created next because many intermediate feature classes would be created and then deleted later on, and lists make that cleanup loop easier.  Field names were set and the geoprocessing tools were run.  Later, an update cursor was used to fill in any buffered slope values that were null with the point slope values.  Summary statistics were calculated and the tabulate area results were exported to an MS Excel file.  The script for this exercise is shown below.
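For example, the buffer radius is derived from each slide's recorded dimensions as the mean of length and width converted from feet to meters, so a slide 400 ft long and 200 ft wide gets a buffer of roughly 91 m.  A quick check of that field calculation in plain Python, using hypothetical sample values:

#same expression used in the Calculate Field step below: (!LENGTH_ft! + !WIDTH_ft!) / 2 * 0.3048
length_ft = 400.0   #hypothetical slide length
width_ft = 200.0    #hypothetical slide width
buffer_distance_m = (length_ft + width_ft) / 2 * 0.3048
print buffer_distance_m   #prints 91.44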

Results


#-------------------------------------------------------------------
# Name:        Exercise 6
# Purpose:     Calculate the landscape characteristics of landslides
#              based on precipitation, slope, and landcover.
#
# Author:      Laura Hartley, hartlelj
#
# Date:        7/25/2016
#-------------------------------------------------------------------
#Ensure the program is working
print "Script initialized"

#import system modules
import arcpy
from arcpy import env
import os
import shutil
import time
import datetime
from arcpy.sa import *
from arcpy.ddd import *

#Allow the script to overwrite existing files
arcpy.env.workspace = "C:\Users\lhartley13\Documents\ArcGIS\EX6"
arcpy.env.overwriteOutput = True
arcpy.env.outputCoordinateSystem = "C:\Users\lhartley13\Documents\ArcGIS\EX5\Ex5\Ex5.gdb\OregonBoundary"
#NAD_1983_HARN_Oregon_Statewide_Lambert
arcpy.CheckOutExtension('3D')
arcpy.CheckOutExtension('spatial')

#Objective Two: Create Variables
geodatabasePath = "C:\Users\lhartley13\Documents\ArcGIS\EX6\Ex6.gdb"
featureDataPath = "C:\Users\lhartley13\Documents\ArcGIS\EX6\Ex6.gdb\OregonFCs"
otherDataFolder = "C:\Users\lhartley13\Documents\ArcGIS\EX6"

landslidePts = "C:\Users\lhartley13\Documents\ArcGIS\EX6\Ex6.gdb\OregonFCs\LandslidePoints_NW"
precipData = "C:\Users\lhartley13\Documents\ArcGIS\EX6\Ex6.gdb\NW_Oregon_PrecipitationData_800m"
landUseData = "C:\Users\lhartley13\Documents\ArcGIS\EX6\Ex6.gdb\NLCD2011_Oregon_NW"
slopeData = "C:\Users\lhartley13\Documents\ArcGIS\EX5\Ex5\Ex5.gdb\NW_Oregon_Slope_30m"

pointName = os.path.splitext(os.path.basename(landslidePts))[0]

pointSelectOutput = os.path.join(geodatabasePath,pointName) +"_Selected"
pointsWithLandUse = os.path.join(geodatabasePath,pointName) +"_LU"
pointsWithSlope = os.path.join(geodatabasePath,pointName) +"_slope"
pointsWithPrecip = os.path.join(geodatabasePath,pointName) +"_Precip"

pointsBuffered=pointSelectOutput + "_Buffer"

bufferedSlopeStatistics = pointsBuffered + "_Slope"
bufferedSlideTypeSummary = pointsBuffered + "_Summary_SlideType"
bufferedLandUseSummary = pointsBuffered +"_Summary_LULC"
bufferedBothSummary = pointsBuffered + "_Summary_Both"
buffered_TabulateLULC = pointsBuffered + "_TabulateLULC"

tabulateLULC_ExcelTable = os.path.join(otherDataFolder,os.path.splitext(os.path.basename(buffered_TabulateLULC))[0]+".xls")

createdFCs = []
createdFCs.append(pointSelectOutput)
createdFCs.append(pointsWithLandUse)
createdFCs.append(pointsWithSlope)
createdFCs.append(pointsWithPrecip)

#field names
uniqueIDfield = "UNIQUE_ID"
bufferDistanceField = "BufferDistance"
slideLengthmFieldName = 'Slide_Length_m'
rasterValueFieldName = "RASTERVALU"
lulcFieldName = "NLCD_2011"
moveClassFieldName = "MOVE_CLASS"
pointSlopeFieldName = 'Slope_Pt'
pointPrecipFieldName = 'PRECIP_mm'
bufferedSlopeFieldName = 'Slope_Buffered_Mean'
calculatedSlopeFieldName = 'Slope_Mean_Calculated'

#Process: Select
print "Selecting!" ,datetime.datetime.now().strftime("%H:%M:%S")
arcpy.Select_analysis(landslidePts,pointSelectOutput,"LENGTH_ft<>0 AND WIDTH_ft<>0 AND MOVE_CLASS IN ('Debris Flow','Earth Flow','Flow')")

#Add land use class information to the landslide points only for their specific locations
ExtractValuesToPoints(pointSelectOutput,landUseData,pointsWithLandUse, "NONE","ALL")
arcpy.AlterField_management(pointsWithLandUse,rasterValueFieldName,"LULC_Value","LULC Value")

#Add slope and precipitation information to the landslide points using bilinear interpolation
ExtractMultiValuesToPoints(pointsWithLandUse,[[slopeData,pointSlopeFieldName],[precipData,pointPrecipFieldName]],"BILINEAR")

#Process- Add Field
arcpy.AddField_management(pointsWithLandUse,bufferDistanceField,"FLOAT","","","","","NULLABLE","NON_REQUIRED","")

#Process- Calculate Field
arcpy.CalculateField_management(pointsWithLandUse,bufferDistanceField, "(!LENGTH_ft!+!WIDTH_ft!)/2*0.3048", "PYTHON_9.3","")

#Process- Add Field
arcpy.AddField_management(pointsWithLandUse,slideLengthmFieldName,"FLOAT","","","","","NULLABLE","NON_REQUIRED","")

#Process- Calculate Field
arcpy.CalculateField_management(pointsWithLandUse,slideLengthmFieldName,"!LENGTH_ft!*0.3048","PYTHON_9.3","")

#Process- Buffer (2)
print "Buffering!" ,datetime.datetime.now().strftime("%H:%M:%S")
arcpy.Buffer_analysis(pointsWithLandUse,pointsBuffered,bufferDistanceField,"FULL","ROUND","NONE","","PLANAR")

#Process- Zonal Statistics as Table
arcpy.gp.ZonalStatisticsAsTable_sa(pointsBuffered,uniqueIDfield,slopeData,bufferedSlopeStatistics,"DATA","ALL")

#Process- Join Field
arcpy.JoinField_management(pointsBuffered,uniqueIDfield,bufferedSlopeStatistics,uniqueIDfield,"Min;Max;Range;Mean;Std")

#Alter the joined field name so it's easily identifiable
arcpy.AlterField_management(pointsBuffered,"MEAN",bufferedSlopeFieldName,bufferedSlopeFieldName)

#Process- Add Field for the search and update cursor
arcpy.AddField_management(pointsBuffered,calculatedSlopeFieldName,"FLOAT","","","","","NULLABLE","NON_REQUIRED","")


#Create and use an update cursor to set the slope value to the buffered value for all that are not null,
#and the point for features that have a null buffered value
print "Setting values based on buffered and slope points!" ,datetime.datetime.now().strftime("%H:%M:%S")
listOfFields=[pointSlopeFieldName,bufferedSlopeFieldName,calculatedSlopeFieldName]

#The update cursor takes a list of fields and allows their values to be changed row by row
with arcpy.da.UpdateCursor(pointsBuffered,listOfFields) as cursor:
    #it allows for iteration by row so each value will be calculated independently
    for row in cursor:
        if row[1] is None:
            row[2] = row[0]
            cursor.updateRow(row)
        else:
            row[2] = row[1]
            cursor.updateRow(row)
del cursor

#Calculate summary statistics for precip, slope, and slide area, based on movement class
print "Calculating summary statistics for precip, slope, and slide area, based on movement class",datetime.datetime.now().strftime("%H:%M:%S")
arcpy.Statistics_analysis(pointsBuffered,bufferedSlideTypeSummary,[[pointPrecipFieldName,"MEAN"],[pointSlopeFieldName,"MEAN"],[bufferDistanceField,"MEAN"],[slideLengthmFieldName,"MEAN"]],moveClassFieldName)

#Calculate summary statistics for precip, slope, and slide area, based on LULC data
print "Calculating summary statistics for precip, slope, and slide area, based on LULC" ,datetime.datetime.now().strftime("%H:%M:%S")
arcpy.Statistics_analysis(pointsBuffered,bufferedLandUseSummary,[[pointPrecipFieldName,"MEAN"],[pointSlopeFieldName,"MEAN"],[bufferDistanceField,"MEAN"],[slideLengthmFieldName,"MEAN"]],lulcFieldName)

#Calculate summary statistics for precip, slope, and slide area, based on movement class and LULC class
print "Calculating summary statistics for precip, slope, and slide area, based on LULC" ,datetime.datetime.now().strftime("%H:%M:%S")
arcpy.Statistics_analysis(pointsBuffered,bufferedBothSummary,[[pointPrecipFieldName,"MEAN"],[pointSlopeFieldName,"MEAN"],[bufferDistanceField,"MEAN"],[slideLengthmFieldName,"MEAN"]],[lulcFieldName,moveClassFieldName])

#Tabulate area is used to identify how much of each buffer falls within different LULC classes
print "Tabulating the area of each LULC class for each buffered point"
TabulateArea(pointsBuffered,uniqueIDfield,landUseData,lulcFieldName,buffered_TabulateLULC,15)

#Export the table to excel for additional analysis
arcpy.TableToExcel_conversion(buffered_TabulateLULC,tabulateLULC_ExcelTable)

for fc in createdFCs:
    if arcpy.Exists(fc):
        print fc +"Exists, and will now be deleted"
        arcpy.Delete_management(fc)

print "script is complete" ,datetime.datetime.now().strftime("%H:%M:%S")

Wednesday, July 20, 2016

Geog 491: Exercise 5- Preparing Raster Data for Analysis using Raster Lists and Loops

Goals and Background

This exercise focused on using standard raster geoprocessing tools and basic raster analysis tools in a FOR IN loop by creating a list of rasters.  The objectives are stated below:

  • Set up a script, import modules, set environments and check out extensions.
  • Set up smart variables.
  • Create a list of rasters.
  • Set up a FOR IN loop.
  • Format the raster names in the list of rasters.
  • Project a list of rasters.
  • Clip a list of rasters.
  • Create a hillshade for a list of rasters.
  • Calculate slope for a list of rasters.
  • Merge the results together.
Methods

Lists were created to collect the clipped, hillshade, and slope rasters produced by the script so they could be mosaicked at the end.  A FOR IN loop was then set up to run the same sequence of tools (project, clip, hillshade, and slope) on each raster in the list.  The script can be seen below.
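The core of the exercise is the FOR IN loop over a raster list; a condensed sketch of that skeleton, with a placeholder workspace path and the geoprocessing tools omitted, looks like this:

import os
import arcpy

#placeholder folder; the real script points at the downloaded DEM tiles
arcpy.env.workspace = r"C:\path\to\dem_tiles"

clipList = []   #collects outputs so they can be mosaicked after the loop

for raster in arcpy.ListRasters("", "IMG"):
    #strip the path and extension to build clean output names
    name = os.path.splitext(os.path.basename(raster))[0]
    print "Processing " + name
    #...project, clip, hillshade, and slope tools run here...
    clipList.append(name + "_Clip")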

Results


#-------------------------------------------------------------------
# Name:        Exercise 5
# Purpose:     To use standard raster geoprocessing preparation tools,
#              such as project and clip, as well as basic raster analysis
#              tools, such as hillshade and slope, in a FOR IN loop by
#              creating a list of rasters.
#
# Author:      Laura Hartley, hartlelj
#
# Date:        7/20/2016
#-------------------------------------------------------------------
#Ensure the program is working
print "Script initialized"

#import system modules
import os
import shutil
import time
import datetime
import arcpy
from arcpy import env
from arcpy.sa import *
from arcpy.ddd import *

#Allow the script to overwrite existing files
arcpy.env.workspace = "C:\Users\lhartley13\Documents\ArcGIS\EX5\Ex5\oregon"
arcpy.env.overwriteOutput = True

#The next line needs to be set to your EX5 geodatabase's Oregon Boundary FC
arcpy.env.outputCoordinateSystem = "C:\Users\lhartley13\Documents\ArcGIS\EX5\Ex5\Ex5.gdb\OregonBoundary"
arcpy.CheckOutExtension('3D')
arcpy.CheckOutExtension('spatial')

#create variables
print "Creating Variables" ,datetime.datetime.now().strftime("%H:%M:%S")

#The workspace is the folder holding the rasters
workspace = r"C:\Users\lhartley13\Documents\ArcGIS\EX5\Ex5\oregon"
geodatabasePath = "C:\Users\lhartley13\Documents\ArcGIS\EX5\Ex5\Ex5.gdb"

#Set the boundary used to clip all of the rasters to the Oregon State boundary
clipBoundary = "C:\Users\lhartley13\Documents\ArcGIS\EX5\Ex5\Ex5.gdb\OregonBoundary"

#This list will hold all of the clipped rasters for a future mosaicing operation
clipList = []
#This list is similar to the clip list, but is for hillshades
hsList = []
#this list is for slope rasters
slopeList = []

#The following names will be used for the three mosaic operations: Elevation, hillshade, and slope
mosaicedDEM = "NW_Oregon_Elevation_30m"
mosaicedHS = "NW_Oregon_Hillshade_30m"
mosaicedSlope = "NW_Oregon_Slope_30m"

print "Obtaining a list of rasters"
rasters = arcpy.ListRasters("", "IMG")

#This loop pulls one raster at a time from the list named 'rasters'
for raster in rasters:

    print "Formatting the raster's name"
    #using os.path.basename and os.path.splitext, we obtain just the base name of the raster, without the file extension or source path
    outName = os.path.splitext(os.path.basename(raster))[0]
    print "Unformatted name: " + outName
    outName = str(outName)

    #we use a string 'slice' to remove the first three characters from the filename
    #essentially removing the img, and leaving only the coordinates
    fileName = str(outName[3:])
    print "Formatted name: " +fileName

    #using the newly reformatted filename, we set all of the names for the new rasters we are to create
    rasterProj = os.path.join(geodatabasePath,fileName) + "_Proj"
    rasterClip = os.path.join(geodatabasePath,fileName) +"_Clip"
    rasterHS = os.path.join(geodatabasePath,fileName) +"_HS"
    rasterSlope = os.path.join(geodatabasePath,fileName) +"_Slope"

    #print rasterProj, to make sure the name formatting is working
    print rasterProj

    #we already set the CS using the output CS env setting so we don't need to worry about it here.
    #Be sure to use 'BILINEAR' or 'CUBIC' resampling when projecting elevation and other continuous
    #datasets, as they will otherwise have incorrect values.
    print "Projecting" + fileName
    ,datetime.datetime.now().strftime("%H:%M:%S")
    arcpy.ProjectRaster_management(raster,rasterProj,"", "BILINEAR")

    #the projected raster will be clipped to the state boundary, and 'NO_MAINTAIN_EXTENT' should be set so
    #the alignment of the pixels isn't changed
    print "Clipping" +fileName 
    ,datetime.datetime.now().strftime("%H:%M:%S")
    arcpy.Clip_management(rasterProj,"",rasterClip,clipBoundary,
    "","ClippingGeometry","NO_MAINTAIN_EXTENT")
    clipList.append(rasterClip)

    #creating a hillshade from the clipped raster
    print "Hillshading" +fileName
    ,datetime.datetime.now().strftime("%H:%M:%S")
    arcpy.HillShade_3d(rasterClip,rasterHS)
    hsList.append(rasterHS)

    #create a slope raster from the clipped raster
    print "Calculating slope of" + fileName
    arcpy.Slope_3d(rasterClip, rasterSlope)
    slopeList.append(rasterSlope)

    print "the loop has completed once"

#merging the tiles
print "Mosacing the clipped DEMs"
arcpy.MosaicToNewRaster_management(clipList,geodatabasePath,mosaicedDEM,"","32_BIT_FLOAT","",1)

print "Mosacing the hillshaded DEMs"
arcpy.MosaicToNewRaster_management(hsList,geodatabasePath,mosaicedHS,"","8_BIT_UNSIGNED","",1)

print "Mosaicing the slope raster"
arcpy.MosaicToNewRaster_management(slopeList,geodatabasePath,mosaicedSlope,"","32_BIT_FLOAT","",1)

print "Script Complete" ,datetime.datetime.now().strftime("%H:%M:%S")

Geog 491: Exercise 4- Working with Fields and Selections

Goal and Background

To use Python to add a field, calculate a field, and apply an SQL statement.  The objectives for this exercise are stated below:

  • Set up the script and import the modules.
  • Set up smart variables.
  • Add a field and calculate a field.
  • Select features based on an SQL statement.
  • Add a field and calculate for selected features.
Methods

The comments in green were added along the way to keep the script organized.  The Add Field and Calculate Field tools were used, along with an SQL statement, to produce results for use in a future exercise.  From the calculated area field, the Select tool was used to keep only the larger polygons, and in the end compactness was calculated for them.  Exercise 3 data was used.  Below is the script written for this exercise.
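Compactness here is computed as Shape_Area divided by Shape_Length (area over perimeter), so of two shapes with the same area, the one with the shorter perimeter scores higher.  A small worked example with hypothetical values:

import math

#two hypothetical polygons, both 4 km2 in area (units in meters)
square_area = 4e6
square_perimeter = 4 * 2000.0                                #2 km sides -> 8000 m
circle_perimeter = 2 * math.pi * math.sqrt(4e6 / math.pi)    #about 7090 m

#same ratio the script computes with [Shape_Area] / [Shape_Length]
print square_area / square_perimeter    #500.0
print 4e6 / circle_perimeter            #about 564, so the circle is more compact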

Results


#-------------------------------------------------------------------
# Name:        Exercise 4
# Purpose:     Find ideal locations for a ski resort by calculating
#              compactness
#
# Author:      Laura Hartley, hartlelj
#
# Date:        7/18/2016
#-------------------------------------------------------------------

#Ensure the program is working
print "script initialized"

#import system modules
import arcpy
from arcpy import env
import os
import time
import datetime

#Allow the script to overwrite existing files
arcpy.env.overwriteOutput = True

#Create variables
print "Creating Variables"
,datetime.datetime.now().strftime("%H:%M:%S")
#input variables
ex3geodatabasePath = "C:\Users\lhartley13\Documents\ArcGIS\EX3\EX3.gdb"

#This filename will be used for the subsequent 'dissolve output' fc name
intersectName = "IntersectedFcs"
dissolveOutput = os.path.join(ex3geodatabasePath,intersectName)+"_Dissolved"

#output variables
selectName = "DissolveFC_Selected"
finalSelect = os.path.join(ex3geodatabasePath,selectName)

print "Adding a field to the dissolve fcs" ,datetime.datetime.now().strftime("%H,%M,%S")
#Add field
arcpy.AddField_management(dissolveOutput, "Area_km2", "FLOAT", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")

print "Calculating the area in km2" ,datetime.datetime.now().strftime ("%H:%M:%S")
#Calculate field
arcpy.CalculateField_management(dissolveOutput, "Area_km2", "[Shape_Area] / 1000000", "VB", "")

print "Selecting only polygons with areas greater than 2km" ,datetime.datetime.now().strftime ("%H:%M:%S")
#Select (4)
arcpy.Select_analysis(dissolveOutput, finalSelect, "Area_km2 >2")

print "Adding a field for calculating compactness" ,datetime.datetime.now().strftime("%H:%M:%S")
#Add field (2)
arcpy.AddField_management(finalSelect,"Compactness_Float", "FLOAT", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")

print "Calculating compactness" ,datetime.datetime.now().strftime("%H:%M:%S")
#Calculate field (2)
arcpy.CalculateField_management(finalSelect, "Compactness_Float", "[Shape_Area] / ([Shape_Length] )", "VB", "")

print "Calculations complete" ,datetime.datetime.now().strftime("%H:%M:%S")

Geog 491: Exercise 3- Introduction to Geoprocessing with Python

Goal and Background

The main goal of this exercise is to export a geoprocessing model as a script, add multiple smart variables, and modify the exported script.  The objectives followed to complete this exercise were:

  • Export a geoprocessing model as a Python script.
  • Import modules and set up the script.
  • Create smart variables.
  • Set up a geoprocessing workflow.

Methods

The Exercise 1 data was used in this exercise for further manipulation.  The green comments in the script below describe what is happening in the process beneath each one.  Print statements with time stamps were used to keep track of when each tool began running, and to get more familiar with print and time statements.  Below is the script used in Exercise 3 to accomplish the objectives stated above.
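The timestamped print statements all follow the same pattern, pairing a status message with datetime.datetime.now().strftime("%H:%M:%S").  A minimal standalone example, with a hypothetical elapsed-time extension at the end:

import datetime

#same pattern used before each geoprocessing tool below
print "Starting to process", datetime.datetime.now().strftime("%H:%M:%S")

#hypothetical extension: record a start time to report elapsed seconds at the end
start = datetime.datetime.now()
#...geoprocessing tools would run here...
elapsed = (datetime.datetime.now() - start).total_seconds()
print "Calculations complete after", elapsed, "seconds"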

Results

#-------------------------------------------------------------------
# Name:        Exercise 3
# Purpose:     To export a geoprocessing model as a script, add several
#              smart variables, and modify the exported script.  Finding
#              ideal areas for ski resorts.
#
# Author:      Laura Hartley, hartlelj
#
# Date:        7/15/2016
#-------------------------------------------------------------------
#ensure the program is working
print "script initialized"

#import arcpy and set up the environments
import arcpy
from arcpy import env
import os
import time
import datetime

#overwrite allows us to overwrite files and save over them
arcpy.env.overwriteOutput = True

#create variables
print "Creating Variables"
,datetime.datetime.now().strftime("%H:%M:%S")

#input geodatabase
ex1geodatabasePath = "C:\Users\lhartley13\Documents\ArcGIS\EX1\Ex1.gdb"

#input names
nfsLand = "NFS_Land"
snowDepth = "SnowDepth_in"
temp = "Temperature_F"
studyArea = "StudyArea"
airports = "Airports"

#linking geodatabase with name
nfsInput = os.path.join(ex1geodatabasePath,nfsLand)
snowDepthInput = os.path.join(ex1geodatabasePath,snowDepth)
tempInput = os.path.join(ex1geodatabasePath,temp)
studyAreaInput = os.path.join(ex1geodatabasePath,studyArea)
airportsInput = "C:\Users\lhartley13\Documents\ArcGIS\EX1\Ex1.gdb\Airports_project"

#output Variables
ex3geodatabasePath = "C:\Users\lhartley13\Documents\ArcGIS\EX3\EX3.gdb"

#clip the layers
nfsLandClip = os.path.join(ex3geodatabasePath,nfsLand) + "_Clip"
snowDepthClip = os.path.join(ex3geodatabasePath,snowDepth) + "_Clip"
tempClip = os.path.join(ex3geodatabasePath,temp) + "_Clip"
airportsClip = os.path.join(ex3geodatabasePath,airports) + "_Clip"

#select the layers
snowDepthSelected = os.path.join(ex3geodatabasePath,snowDepth) + "_Selected"
tempSelected = os.path.join(ex3geodatabasePath,temp) + "_Selected"
airportsSelected = os.path.join(ex3geodatabasePath,airports) + "_Selected"

#buffer output
airportsBuffered = os.path.join(ex3geodatabasePath,airports) + "_Buffer"

#intersect output
intersectName = "IntersectedFcs"
intersectOutput = os.path.join(ex3geodatabasePath,intersectName)

#dissolve output
dissolveOutput = os.path.join(ex3geodatabasePath,intersectName) + "_Dissolved"

#final select
finalSelect = os.path.join(ex3geodatabasePath,intersectName)+ "_MoreThan2km2"

#Begin processing
print "Starting to process" ,datetime.datetime.now().strftime("%H:%M:%S")

print "Clipping fcs to within the study area" ,datetime.datetime.now().strftime("%H:%M:%S")
#clip all of the feature classes to the study area
#clip snow depth
arcpy.Clip_analysis(snowDepthInput, studyAreaInput, snowDepthClip, "")

#clip temperature
arcpy.Clip_analysis(tempInput, studyAreaInput, tempClip, "")

#clip nfs land
arcpy.Clip_analysis(nfsInput, studyAreaInput, nfsLandClip, "")

#clip airports
arcpy.Clip_analysis(airportsInput, studyAreaInput, airportsClip, "")

#select meaningful values from the clipped classes
print "Selecting meaningful values from the clipped classes",datetime.datetime.now().strftime("%H:%M:%S")
arcpy.Select_analysis(snowDepthClip,snowDepthSelected, "gridcode >= 250")

#process: select(2)
arcpy.Select_analysis(tempClip, tempSelected, "gridcode<32")

#process: select(3)
arcpy.Select_analysis(airportsClip, airportsSelected, "OwnerType = 'Pu' AND hasTower = 'Y'")

print "Buffering selected airports" ,datetime.datetime.now().strftime("%H:%M:%S")
#process: buffer
arcpy.Buffer_analysis(airportsSelected, airportsBuffered, "40 Miles", "FULL", "ROUND", "ALL", "", "PLANAR")

print "Intersecting the fcs" ,datetime.datetime.now().strftime("%H:%M:%S")
#process: intersect (2)
arcpy.Intersect_analysis([snowDepthSelected, tempSelected, nfsLandClip, airportsBuffered], intersectOutput, "ALL", "", "INPUT")

print "Dissolving the intersected fcs" ,datetime.datetime.now().strftime("%H:%M:%S")
#process dissolve (2)
arcpy.Dissolve_management(intersectOutput, dissolveOutput, "", "", "SINGLE_PART", "DISSOLVE_LINES")


print "Calculations complete",datetime.datetime.now().strftime("%H:%M:%S")