Lidar Colab
In [1]:
import subprocess

try:
    import lidar
except ImportError:
    print("Installing lidar ...")
    subprocess.check_call(["python", "-m", "pip", "install", "lidar"])
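If you prefer, the same installation can be done with the notebook pip magic instead of subprocess. This is a minimal alternative; the %pip magic is provided by IPython and works in environments such as Colab:

%pip install lidar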
In [2]:
import os
import pkg_resources
from lidar import *

# identify the sample data directory of the package
package_name = "lidar"
data_dir = pkg_resources.resource_filename(package_name, "data/")

# use the sample DEM; change it to your own DEM if needed
in_dem = os.path.join(data_dir, "dem.tif")
# set the output directory
out_dir = os.getcwd()

# parameters for identifying sinks and delineating nested depressions
min_size = 1000  # minimum number of pixels for a depression
min_depth = 0.5  # minimum depth for a depression
interval = 0.3  # slicing interval for the level-set method
bool_shp = True  # output shapefiles for each individual level

# smooth the DEM with a median filter, then extract sinks based on
# the user-defined minimum depression size
out_dem = os.path.join(out_dir, "median.tif")
in_dem = MedianFilter(in_dem, kernel_size=3, out_file=out_dem)
sink_path = ExtractSinks(in_dem, min_size, out_dir)
dep_id_path, dep_level_path = DelineateDepressions(
    sink_path, min_size, min_depth, interval, out_dir, bool_shp
)
print("Results are saved in: {}".format(out_dir))
Median filtering ...
Run time: 0.0599 seconds
Saving dem ...
Loading data ...
min = 379.70, max = 410.72, no_data = -3.402823e+38, cell_size = 1.0
Depression filling ...
Saving filled dem ...
Region grouping ...
Computing properties ...
Saving sink dem ...
Saving refined dem ...
Converting raster to vector ...
Total run time: 0.0972 s
Reading data ...
rows, cols: (400, 400)
Pixel resolution: 1.0
Read data time: 0.0029 seconds
Data preparation time: 0.0092 seconds
Total number of regions: 1
Processing Region # 1 ...
=========== Run time statistics ===========
(rows, cols): (400, 400)
Pixel resolution: 1.0 m
Number of regions: 1
Data preparation time: 0.0092 s
Identify level time: 0.2358 s
Write image time: 0.0026 s
Polygonize time: 0.0098 s
Extract level time: 0.0570 s
Total run time: 0.3150 s
Results are saved in: /home/qiusheng/temp
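To inspect the results, the output rasters can be plotted. The sketch below is one possible way to visualize the delineated depression levels; it assumes rasterio and matplotlib are available in the environment (neither is required by the code above) and reuses the dep_level_path variable returned by DelineateDepressions.

import numpy as np
import rasterio
import matplotlib.pyplot as plt

# open the depression-level raster produced by DelineateDepressions
with rasterio.open(dep_level_path) as src:
    levels = src.read(1)
    nodata = src.nodata

# mask the no-data value so only delineated depressions are displayed
if nodata is not None:
    levels = np.ma.masked_equal(levels, nodata)

plt.imshow(levels, cmap="jet")
plt.colorbar(label="Depression level")
plt.title("Nested depression levels")
plt.show()

The dep_id_path raster can be displayed the same way to show the unique ID assigned to each depression rather than its level.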