The `ZoeDepth` class provides a wrapper around the ZoeDepth model, which estimates a depth map from an RGB image using the `pipeline` API from the Hugging Face Transformers library.
```python
from grid.model.perception.depth.zoedepth import ZoeDepth

car = AirGenCar()  # AirGenCar is assumed to be available in the GRID session

# Capture an image from the AirGen simulator and run model inference on it.
img = car.getImage("front_center", "rgb").data

model = ZoeDepth(use_local=False)
result = model.run(rgbimage=img)
print(result.shape)
```
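The returned value can be treated as a per-pixel depth array. The snippet below is a minimal follow-up sketch that continues from the example above; it assumes `result` is a NumPy array of depth estimates (as suggested by `result.shape`) and saves a colorized depth map with matplotlib.

```python
# Continues from the example above; assumes `result` is a NumPy array
# of per-pixel depth estimates.
import numpy as np
import matplotlib.pyplot as plt

depth = np.squeeze(np.asarray(result, dtype=np.float32))  # H x W depth map
plt.imshow(depth, cmap="plasma")
plt.colorbar(label="estimated depth")
plt.axis("off")
plt.savefig("zoedepth_front_center.png", bbox_inches="tight")
```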
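Under the hood, this corresponds roughly to calling the Transformers depth-estimation pipeline directly. The sketch below is illustrative only: it assumes the `Intel/zoedepth-nyu-kitti` checkpoint and a local image file, whereas the wrapper may load a different checkpoint and apply its own preprocessing.

```python
# Illustrative sketch of the underlying mechanism, not the wrapper's actual code.
# Assumes the "Intel/zoedepth-nyu-kitti" checkpoint and a local RGB image file.
from PIL import Image
from transformers import pipeline

depth_pipe = pipeline("depth-estimation", model="Intel/zoedepth-nyu-kitti")

rgb = Image.open("frame.png").convert("RGB")      # any H x W RGB image
out = depth_pipe(rgb)

depth = out["predicted_depth"].squeeze().numpy()  # raw per-pixel depth tensor
print(depth.shape)                                # resolution may differ from the input
```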