From 738d62900ae70741c5b5c01f25b87eaf299203dc Mon Sep 17 00:00:00 2001
From: Sebastien Courroux
Date: Tue, 6 Feb 2024 10:15:43 +0100
Subject: [PATCH] Fix some issues in Python code and notebooks

---
 Alignment-10Band.ipynb                      | 33 +++++-------------
 Alignment.ipynb                             | 29 ++++------------
 Captures.ipynb                              |  8 ++---
 MicaSense Image Processing Tutorial 1.ipynb | 38 ++++++++-------------
 MicaSense Image Processing Tutorial 2.ipynb |  8 ++---
 MicaSense Image Processing Tutorial 3.ipynb |  8 ++---
 6 files changed, 39 insertions(+), 85 deletions(-)

diff --git a/Alignment-10Band.ipynb b/Alignment-10Band.ipynb
index a9ead889..b8b100ad 100644
--- a/Alignment-10Band.ipynb
+++ b/Alignment-10Band.ipynb
@@ -98,9 +98,7 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {
-    "scrolled": false
-   },
+   "metadata": {},
    "outputs": [],
    "source": [
     "import cv2\n",
@@ -137,18 +135,13 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {
-    "scrolled": false
-   },
+   "metadata": {},
    "outputs": [],
    "source": [
     "\n",
     "cropped_dimensions, edges = imageutils.find_crop_bounds(capture, warp_matrices, warp_mode=warp_mode)\n",
     "im_aligned = imageutils.aligned_capture(capture, warp_matrices, warp_mode, cropped_dimensions, match_index, img_type=img_type)\n",
-    "display(im_aligned)\n",
-    "print(\"fff\")\n",
-    "im_aligned = capture.create_aligned_capture(warp_matrices=warp_matrices, motion_type=warp_mode, img_type=img_type, match_index=match_index)\n",
-    "display(im_aligned)"
+    "im_aligned = capture.create_aligned_capture(warp_matrices=warp_matrices, motion_type=warp_mode, img_type=img_type)"
    ]
   },
   {
@@ -163,9 +156,7 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {
-    "scrolled": false
-   },
+   "metadata": {},
    "outputs": [],
    "source": [
     "# figsize=(30,23) # use this size for full-image-resolution display\n",
@@ -334,9 +325,7 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {
-    "scrolled": false
-   },
+   "metadata": {},
    "outputs": [],
    "source": [
     "from micasense import plotutils\n",
@@ -395,9 +384,7 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {
-    "scrolled": false
-   },
+   "metadata": {},
    "outputs": [],
    "source": [
     "# Compute Normalized Difference Red Edge Index from the NIR(3) and RedEdge(4) bands\n",
@@ -438,9 +425,7 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {
-    "scrolled": false
-   },
+   "metadata": {},
    "outputs": [],
    "source": [
     "x_band = red_band\n",
@@ -469,9 +454,7 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {
-    "scrolled": false
-   },
+   "metadata": {},
    "outputs": [],
    "source": [
     "print(warp_matrices)"
diff --git a/Alignment.ipynb b/Alignment.ipynb
index ecc51d26..e465a18f 100644
--- a/Alignment.ipynb
+++ b/Alignment.ipynb
@@ -34,12 +34,9 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {
-    "scrolled": false
-   },
+   "metadata": {},
    "outputs": [],
    "source": [
-    "import os, glob\n",
     "import micasense.capture as capture\n",
     "%matplotlib inline\n",
     "from pathlib import Path\n",
@@ -111,9 +108,7 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {
-    "scrolled": false
-   },
+   "metadata": {},
    "outputs": [],
    "source": [
     "import cv2\n",
@@ -170,9 +165,7 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {
-    "scrolled": false
-   },
+   "metadata": {},
    "outputs": [],
    "source": [
     "# figsize=(30,23) # use this size for full-image-resolution display\n",
@@ -355,9 +348,7 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {
-    "scrolled": false
-   },
+   "metadata": {},
    "outputs": [],
"source": [ "from micasense import plotutils\n", @@ -416,9 +407,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": false - }, + "metadata": {}, "outputs": [], "source": [ "# Compute Normalized Difference Red Edge Index from the NIR(3) and RedEdge(4) bands\n", @@ -508,9 +497,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": false - }, + "metadata": {}, "outputs": [], "source": [ "x_band = red_band\n", @@ -539,9 +526,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": true - }, + "metadata": {}, "outputs": [], "source": [ "print(warp_matrices)" diff --git a/Captures.ipynb b/Captures.ipynb index 86417c1c..24d27876 100644 --- a/Captures.ipynb +++ b/Captures.ipynb @@ -35,13 +35,11 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": false - }, + "metadata": {}, "outputs": [], "source": [ - "capture.plot_vignette();\n", - "capture.plot_undistorted_radiance();\n", + "capture.plot_vignette()\n", + "capture.plot_undistorted_radiance()\n", "capture.plot_panels()" ] }, diff --git a/MicaSense Image Processing Tutorial 1.ipynb b/MicaSense Image Processing Tutorial 1.ipynb index 57c50463..3b9ab686 100644 --- a/MicaSense Image Processing Tutorial 1.ipynb +++ b/MicaSense Image Processing Tutorial 1.ipynb @@ -8,9 +8,9 @@ "\n", "## Overview\n", "\n", - "This tutorial assumes you have gone through the basic setup [here](./MicaSense Image Processing Setup.html) and your system is set up and ready to go.\n", + "This tutorial assumes you have gone through the basic setup [here](./MicaSense Image Processing Setup.ipynb) and your system is set up and ready to go.\n", "\n", - "In this tutorial, we will walk through how to convert RedEdge data from raw images to radiace and then to reflectance. We will cover the tools required to do this, and walk through some of the basic image processing and radiometric conversions. \n", + "In this tutorial, we will walk through how to convert RedEdge data from raw images to radiance and then to reflectance. We will cover the tools required to do this, and walk through some of the basic image processing and radiometric conversions. 
\n", "\n", "### Opening an image with pyplot\n", "\n", @@ -20,16 +20,12 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": false - }, + "metadata": {}, "outputs": [], "source": [ "import cv2\n", "import matplotlib.pyplot as plt\n", - "import numpy as np\n", - "import os,glob\n", - "import math\n", + "import os\n", "%matplotlib inline\n", "\n", "imagePath = os.path.join('.','data','REDEDGE-MX')\n", @@ -95,7 +91,7 @@ "if os.name == 'nt':\n", " exiftoolPath = os.environ.get('exiftoolpath')\n", "# get image metadata\n", - "meta = metadata.Metadata(imageName, exiftoolPath=exiftoolPath)\n", + "meta = metadata.Metadata(imageName, exiftool_path=exiftoolPath)\n", "cameraMake = meta.get_item('EXIF:Make')\n", "cameraModel = meta.get_item('EXIF:Model')\n", "firmwareVersion = meta.get_item('EXIF:Software')\n", @@ -174,18 +170,16 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": false - }, + "metadata": {}, "outputs": [], "source": [ "import micasense.utils as msutils\n", "radianceImage, L, V, R = msutils.raw_image_to_radiance(meta, imageRaw)\n", - "plotutils.plotwithcolorbar(V,'Vignette Factor');\n", - "plotutils.plotwithcolorbar(R,'Row Gradient Factor');\n", - "plotutils.plotwithcolorbar(V*R,'Combined Corrections');\n", - "plotutils.plotwithcolorbar(L,'Vignette and row gradient corrected raw values');\n", - "plotutils.plotwithcolorbar(radianceImage,'All factors applied and scaled to radiance');" + "plotutils.plotwithcolorbar(V,'Vignette Factor')\n", + "plotutils.plotwithcolorbar(R,'Row Gradient Factor')\n", + "plotutils.plotwithcolorbar(V*R,'Combined Corrections')\n", + "plotutils.plotwithcolorbar(L,'Vignette and row gradient corrected raw values')\n", + "plotutils.plotwithcolorbar(radianceImage,'All factors applied and scaled to radiance')" ] }, { @@ -202,9 +196,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": false - }, + "metadata": {}, "outputs": [], "source": [ "markedImg = radianceImage.copy()\n", @@ -278,9 +270,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": false - }, + "metadata": {}, "outputs": [], "source": [ "# correct for lens distortions to make straight lines straight\n", @@ -323,7 +313,7 @@ "\n", "In future tutorials, we will introduce the Downwelling Light Sensor (DLS) information into the calibration process in order to account for changing irradiance over time (e.g. such as clouds). However, since the panel method is straightforward and repeatable under constant illumination conditions, and is the standard scientific calibration method of surface reflectance, this process is useful and sufficient for many calibration needs.\n", "\n", - "Looking for more? Try the second tutorial [here](./MicaSense%20Image%20Processing%20Tutorial%202.html)." + "Looking for more? Try the second tutorial [here](./MicaSense%20Image%20Processing%20Tutorial%202.ipynb)." ] }, { diff --git a/MicaSense Image Processing Tutorial 2.ipynb b/MicaSense Image Processing Tutorial 2.ipynb index 82e191c8..c2d83b37 100644 --- a/MicaSense Image Processing Tutorial 2.ipynb +++ b/MicaSense Image Processing Tutorial 2.ipynb @@ -6,7 +6,7 @@ "source": [ "# Tutorial 2 - MicaSense library\n", "\n", - "This tutorial assumes you have gone through the [basic setup](./Micasense Image Processing Setup.html) and builds on the basic radiance, irradiance, and reflectance concepts and code covered in the [first tutorial](./MicaSense Image Processing Tutorial 1.html). 
\n", + "This tutorial assumes you have gone through the [basic setup](./Micasense Image Processing Setup.ipynb) and builds on the basic radiance, irradiance, and reflectance concepts and code covered in the [first tutorial](./MicaSense Image Processing Tutorial 1.ipynb). \n", "\n", "In this tutorial, we will cover usage of the MicaSense python library to access images and groups of images. Most of the processing details are hidden away in the library, but the library code is open and available in the git repository. \n", "\n", @@ -53,7 +53,7 @@ "\n", "Metadata for each image is available in the `Image.meta` parameter. This object is a `micasense.Metadata` object and can be accessed directly for image specific metadata extraction. Below, we print the same metadata values as we did in Tutorial #1, but using direct access to the `Metadata` object parameters.\n", "\n", - "A notebook for experimenting with the `Image` class can be found [here](Images.html)." + "A notebook for experimenting with the `Image` class can be found [here](Images.ipynb)." ] }, { @@ -160,9 +160,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": false - }, + "metadata": {}, "outputs": [], "source": [ "import os, glob\n", diff --git a/MicaSense Image Processing Tutorial 3.ipynb b/MicaSense Image Processing Tutorial 3.ipynb index 36c02fe0..8a651cf5 100644 --- a/MicaSense Image Processing Tutorial 3.ipynb +++ b/MicaSense Image Processing Tutorial 3.ipynb @@ -29,7 +29,7 @@ "source": [ "# Remote Sensing Theory\n", "\n", - "In [Tutorial 1](./MicaSense%20Image%20Processing%20Tutorial%201.html) we covered conversion of the image radiance to reflecance using the average radiance from an area of a specially made lambertian reflectance panel. As we did not get very far into remote sensing theory, we will cover that some here as we move on to the more complex problem of using the data from the RedEdge Downwelling Light Sensor (DLS). To get started, we will review some of the fundamental measurements in remote sensing.\n", + "In [Tutorial 1](./MicaSense%20Image%20Processing%20Tutorial%201.ipynb) we covered conversion of the image radiance to reflecance using the average radiance from an area of a specially made lambertian reflectance panel. As we did not get very far into remote sensing theory, we will cover that some here as we move on to the more complex problem of using the data from the RedEdge Downwelling Light Sensor (DLS). To get started, we will review some of the fundamental measurements in remote sensing.\n", "\n", "## Definition of Terms\n", "\n", @@ -101,8 +101,8 @@ "import os, glob\n", "import micasense.capture as capture\n", "\n", - "images_path = os.path.join('.','data','REDEDGE-P')\n", - "image_names = glob.glob(os.path.join(images_path,'IMG_0000_*.tif'))\n", + "images_path = os.path.join('.', 'data', 'REDEDGE-MX')\n", + "image_names = glob.glob(os.path.join(images_path, 'IMG_0001_*.tif'))\n", "cap = capture.Capture.from_filelist(image_names)" ] }, @@ -169,7 +169,7 @@ "plt.scatter(center_wavelengths,dls_irradiances)\n", "plt.xlabel('Wavelength (nm)')\n", "plt.ylabel('Irradiance ($W/m^2/nm$)')\n", - "plt.show();\n", + "plt.show()\n", "\n", "cap.plot_undistorted_reflectance(dls_irradiances)" ]