Skip to content

Commit 7fa2af2

Browse files
authored
ver. 1.2.0
1 parent e1c3ade commit 7fa2af2

15 files changed

Lines changed: 332 additions & 44 deletions

DESCRIPTION

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,8 @@
11
Package: coreCT
22
Type: Package
33
Title: Programmatic Analysis of Sediment Cores Using Computed Tomography Imaging
4-
Version: 1.1.2
5-
Date: 2017-08-12
4+
Version: 1.2.0
5+
Date: 2017-08-20
66
Author: Troy D. Hill <Hill.Troy@gmail.com>, Earl Davey
77
Maintainer: Troy D. Hill <Hill.Troy@gmail.com>
88
Description: Computed tomography (CT) imaging is a powerful tool for understanding the composition of sediment cores. This package streamlines and accelerates the analysis of CT data generated in the context of environmental science. Included are tools for processing raw DICOM images to characterize sediment composition (sand, peat, etc.). Root analyses are also enabled, including measures of external surface area and volumes for user-defined root size classes. For a detailed description of the application of computed tomography imaging for sediment characterization, see: Davey, E., C. Wigand, R. Johnson, K. Sundberg, J. Morris, and C. Roman. (2011) <DOI: 10.1890/10-2037.1>.

INDEX

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
conv Calculates the area and volume of material from a matrix of processed DICOM images
22
convDir Calculates the area and volume of material from a directory of raw DICOM images
3+
coreHist Whole-core frequency distribution of Hounsfield units
34
getSurface Remove artificial surface layers
45
rootSize Calculates root surface area from a matrix of processed DICOM images
56
rootSizeDir Calculates root surface area from a directory of raw DICOM images

NAMESPACE

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,10 +2,17 @@
22

33
export(conv)
44
export(convDir)
5+
export(coreHist)
56
export(getSurface)
67
export(rootSize)
78
export(rootSizeDir)
89
export(voxDims)
10+
importFrom(grDevices,dev.off)
11+
importFrom(grDevices,png)
12+
importFrom(graphics,abline)
13+
importFrom(graphics,par)
14+
importFrom(graphics,plot)
15+
importFrom(graphics,text)
916
importFrom(igraph,decompose)
1017
importFrom(igraph,spectrum)
1118
importFrom(igraph,union)

R/conv.R

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,8 @@
11
#' @title Convert a matrix of semi-processed DICOM images to mass and volume of material classes
22
#'
3-
#' @description Calculates the mass, cross-sectional area, and volume of material classes for each CT slice.
3+
#' @description Converts raw CT units to material classes for each CT slice.
44
#'
5-
#' @details Calculates the mass, cross-sectional area, and volume of material classes for each CT slice. This function requires that values be Hounsfield Units (i.e., data must be semi-processed from the raw DICOM imagery).
5+
#' @details Calculates average Hounsfield units, cross-sectional areas (cm2), volumes (cm3), and masses (g) of material classes for each CT slice. This function assumes that core walls and all non-sediment material have been removed from the raw DICOM imagery. This function converts data from raw x-ray attenuation values to Hounsfield Units, and then uses user-defined calibration rod inputs to categorize sediment components: air, roots and rhizomes, peat, water, particles, sand, and rock/shell.
66
#'
77
#' @usage conv(mat.list, upperLim = 3045, lowerLim = -1025,
88
#' pixelA, thickness = 0.625, # all in mm
@@ -68,8 +68,8 @@ conv <- function(mat.list, upperLim = 3045, lowerLim = -1025,
6868
glassHU = 1345.0696, glassSD = 45.4129,
6969
waterHU = 63.912, waterSD = 14.1728,
7070
densities = c(0.0012, 1, 1.23, 2.2) # format = air, water, Si, glass
71-
) {
72-
pb <- txtProgressBar(min = 0, max = length(mat.list), initial = 0, style = 3)
71+
) {
72+
pb <- utils::txtProgressBar(min = 0, max = length(mat.list), initial = 0, style = 3)
7373
voxelVol <- pixelA * thickness / 1e3 # cm3
7474
water.LB <- waterHU - waterSD
7575
water.UB <- waterHU + waterSD
@@ -80,7 +80,7 @@ conv <- function(mat.list, upperLim = 3045, lowerLim = -1025,
8080
upper = c(round(airHU + airSD), round(water.LB), round(water.UB), round(SiHU + SiSD), 750, round(glassHU + glassSD), round(upperLim)))
8181

8282
densitydf <- data.frame(HU = c(airHU, waterHU, SiHU, glassHU), density = densities)
83-
summary(lm1 <- lm(density ~ HU, data = densitydf)) # density in g/cm3
83+
summary(lm1 <- stats::lm(density ~ HU, data = densitydf)) # density in g/cm3
8484

8585
for (i in 1:length(mat.list)) {
8686
depth <- thickness * i / 10 # cm
@@ -101,15 +101,15 @@ conv <- function(mat.list, upperLim = 3045, lowerLim = -1025,
101101
vol.output <- temp.output[, 1:8] * (thickness / 10) # cm3
102102
names(vol.output) <- gsub("2", "3", names(temp.output)[1:8])
103103

104-
wetMass <- (temp * coef(lm1)[2] + coef(lm1)[1]) * voxelVol # convert to g/cm3 and then to g (wet) in each pixel
104+
wetMass <- (temp * stats::coef(lm1)[2] + stats::coef(lm1)[1]) * voxelVol # convert to g/cm3 and then to g (wet) in each pixel
105105
df1 <- data.frame(bin = bin, wetMass = wetMass, HU = temp)
106-
test <- aggregate(wetMass ~ bin, data = df1, sum) # sum mass by category
106+
test <- stats::aggregate(wetMass ~ bin, data = df1, sum) # sum mass by category
107107
test <- base::merge(data.frame(bin = splits$material), test, all = TRUE)
108108
mass.output <- data.frame(t(as.vector(test[, 2])))
109109
mass.output[is.na(mass.output)] <- 0
110110
names(mass.output) <- paste0(test[, 1], ".g")
111111

112-
meanHUs <- aggregate(HU ~ bin, data = df1, mean) # mean HU in each category
112+
meanHUs <- stats::aggregate(HU ~ bin, data = df1, mean) # mean HU in each category
113113
meanHUs <- base::merge(data.frame(bin = splits$material), meanHUs, all = TRUE)
114114
HU.output <- data.frame(t(as.vector(meanHUs[, 2])))
115115
# HU.output[is.na(HU.output)] <- 0 # leave as NA
@@ -130,7 +130,7 @@ conv <- function(mat.list, upperLim = 3045, lowerLim = -1025,
130130
} else {
131131
outDat <- outDat.init
132132
}
133-
setTxtProgressBar(pb, i)
133+
utils::setTxtProgressBar(pb, i)
134134
}
135135
outDat <- outDat[, c("depth", names(outDat)[!names(outDat) %in% "depth"])]
136136
outDat

R/convDir.R

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -82,7 +82,7 @@ convDir <- function(directory = file.choose(),
8282
# directory <- directory # do nothing
8383
} else stop("Incorrect directory name: directory specified in a character string must end with a '/'; if 'file.choose()' is used, the selected file must be a dicom image")
8484
# load DICOMs, takes a couple minutes
85-
fname <- readDICOM(directory, verbose = TRUE)
85+
fname <- oro.dicom::readDICOM(directory, verbose = TRUE)
8686

8787
} else if (exists(directory) & (sum(names(get(directory)) %in% c("hdr", "img")) == 2)){ # could have better error checking here
8888
fname <- get(directory)
@@ -91,11 +91,11 @@ convDir <- function(directory = file.choose(),
9191
# pixelArea <- voxDims(directory)$pixelArea.mm2
9292
# thick <- voxDims(directory)$thickness.mm
9393
pixelArea <- as.numeric(strsplit(fname$hdr[[1]]$value[fname$hdr[[1]]$name %in% "PixelSpacing"], " ")[[1]][1])^2
94-
thick <- unique(extractHeader(fname$hdr, "SliceThickness"))
94+
thick <- unique(oro.dicom::extractHeader(fname$hdr, "SliceThickness"))
9595

9696
# convert raw units to Hounsfield units
97-
ct.slope <- unique(extractHeader(fname$hdr, "RescaleSlope"))
98-
ct.int <- unique(extractHeader(fname$hdr, "RescaleIntercept"))
97+
ct.slope <- unique(oro.dicom::extractHeader(fname$hdr, "RescaleSlope"))
98+
ct.int <- unique(oro.dicom::extractHeader(fname$hdr, "RescaleIntercept"))
9999
HU <- lapply(fname$img, function(x) x*ct.slope + ct.int)
100100

101101
# pass data to conv()

R/coreHist.R

Lines changed: 130 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,130 @@
1+
#' @title Whole-core frequency distribution of Hounsfield units
2+
#'
3+
#' @description Provides the raw data and plots a frequency distribution for Hounsfield Units in the entire core, also delineating material classes.
4+
#'
5+
#' @usage coreHist(directory = file.choose(),
6+
#' units = "percent",
7+
#' upperLim = 3045, lowerLim = -1025,
8+
#' airHU = -850.3233, airSD = 77.6953,
9+
#' SiHU = 271.7827, SiSD = 39.2814,
10+
#' glassHU = 1345.0696, glassSD = 45.4129,
11+
#' waterHU = 63.912, waterSD = 14.1728,
12+
#' returnData = TRUE, pngName = NULL)
13+
#'
14+
#' @param directory a character string that can be (1) a matrix of DICOM images that exists in the global environment, or (2) the address of an individual DICOM file in a folder of DICOM images. The default action is \code{file.choose()}; a browser menu appears so the user can select the desired directory by identifying a single DICOM file in the folder of images.
15+
#' @param units units to be used for plotting purposes: either "percent" (the default) or "absolute"
16+
#' @param upperLim upper bound cutoff for pixels (Hounsfield Units); upper bound is inclusive
17+
#' @param lowerLim lower bound cutoff for pixels (Hounsfield Units); lower bound is exclusive
18+
#' @param airHU mean value for air-filled calibration rod (Hounsfield Units)
19+
#' @param airSD standard deviation for air-filled calibration rod
20+
#' @param SiHU mean value for colloidal silica calibration rod
21+
#' @param SiSD standard deviation for colloidal Si calibration rod
22+
#' @param glassHU mean value for glass calibration rod
23+
#' @param glassSD standard deviation for glass calibration rod
24+
#' @param waterHU mean value for water filled calibration rod
25+
#' @param waterSD standard deviation for water filled calibration rod
26+
#' @param returnData if \code{TRUE}, voxel counts for each Hounsfield unit from \code{lowerLim} to \code{upperLim} are returned, as are material class definitions. These are the data needed to re-create and modify the frequency plot.
27+
#' @param pngName if this is not \code{NULL}, the frequency plot is saved to disk. In that case, \code{pngName} should be a character string containing the name and address of the file.
28+
#'
29+
#' @return list if \code{returnData = TRUE}, a list is returned containing (1) the frequencies for each Hounsfield unit value from \code{lowerLim} to \code{upperLim}, and (2) the boundaries for material classes. Lower boundaries for a component class are exclusive, while upper bounds are inclusive. These materials allow the frequency distribution to be plotted by the user. If \code{returnData = FALSE} the data are plotted in the graphics window, but nothing is preserved.
30+
#'
31+
#' @examples
32+
#' # data(core_426)
33+
#' coreHist("core_426", returnData = FALSE)
34+
#'
35+
#' @importFrom oro.dicom extractHeader
36+
#' @importFrom oro.dicom readDICOM
37+
#' @importFrom grDevices dev.off
38+
#' @importFrom grDevices png
39+
#' @importFrom graphics plot
40+
#' @importFrom graphics abline
41+
#' @importFrom graphics text
42+
#' @importFrom graphics par
43+
#'
44+
#' @export
45+
46+
# coreHist: tabulates the whole-core frequency distribution of Hounsfield
# units (HU), plots it with material-class boundaries overlaid, and
# optionally returns the underlying counts. See the roxygen block above for
# full parameter documentation.
coreHist <- function(directory = file.choose(),
                     units = "percent",
                     upperLim = 3045, lowerLim = -1025,
                     airHU = -850.3233, airSD = 77.6953, # all cal rod arguments are in Hounsfield Units
                     SiHU = 271.7827, SiSD = 39.2814,
                     glassHU = 1345.0696, glassSD = 45.4129,
                     waterHU = 63.912, waterSD = 14.1728,
                     returnData = TRUE, pngName = NULL) {
  if (!exists(directory)) { # is "directory" an existing object (user-loaded DICOM matrix)?
    if (substr(directory, nchar(directory) - 3, nchar(directory)) %in% ".dcm") {
      directory <- dirname(directory)
    } else if (!grepl("/", directory)) {
      # BUG FIX: original used 'grep("/", directory) == 1', which errors with
      # "argument is of length zero" when there is no slash, so this
      # informative message was unreachable. grepl() returns FALSE instead.
      stop("Incorrect directory name: directory specified in a character string must end with a '/'; if 'file.choose()' is used, the selected file must be a dicom image")
    }
    # load DICOMs, takes a couple minutes
    fname <- oro.dicom::readDICOM(directory, verbose = TRUE)
  } else if (exists(directory) && (sum(names(get(directory)) %in% c("hdr", "img")) == 2)) { # could have better error checking here
    fname <- get(directory)
  } else stop("Invalid input: 'directory' object or file location is incorrectly specified.")

  # Divisions between material classes (HU). Lower bounds are exclusive,
  # upper bounds inclusive.
  splits <- data.frame(material = c("air", "RR", "water", "peat", "particles", "sand", "rock_shell"),
                       lower = c(round(lowerLim), round(airHU + airSD), round(waterHU - waterSD), round(waterHU + waterSD), round(SiHU + SiSD), 750, round(glassHU + glassSD)),
                       upper = c(round(airHU + airSD), round(waterHU - waterSD), round(waterHU + waterSD), round(SiHU + SiSD), 750, round(glassHU + glassSD), round(upperLim)))

  # convert raw attenuation values to Hounsfield units using header metadata
  # (NOTE: the pixel-area/slice-thickness extraction in the original was dead
  # code in this function and has been removed; see voxDims() for that.)
  ct.slope <- unique(oro.dicom::extractHeader(fname$hdr, "RescaleSlope"))
  ct.int   <- unique(oro.dicom::extractHeader(fname$hdr, "RescaleIntercept"))
  HU <- lapply(fname$img, function(x) x * ct.slope + ct.int)

  # Accumulate voxel counts for each integer HU value. Values are shifted by
  # -lowerLim so tabulate() sees positive integers; HU == lowerLim maps to 0
  # and is dropped by tabulate(), making the lower bound exclusive.
  nBins <- upperLim - lowerLim + 1
  counts <- integer(nBins)
  for (i in seq_along(HU)) {
    counts <- counts + tabulate(HU[[i]] - lowerLim, nbins = nBins)
  }
  tempDat2 <- data.frame(Var1 = (lowerLim + 1):(upperLim + 1), finalFreq = counts)

  if (units == "percent") {
    tot <- sum(tempDat2$finalFreq, na.rm = TRUE)
    ylabel <- "Frequency (% of total voxels)"
  } else {
    tot <- 1 # "absolute": plot raw voxel counts
    ylabel <- "Frequency (no. of voxels)"
  }

  # rows strictly inside (lowerLim, upperLim) are plotted/returned
  inRange <- (tempDat2$Var1 < upperLim) & (tempDat2$Var1 > lowerLim)

  if (!is.null(pngName)) {
    # BUG FIX: png() with units = "in" requires 'res'; without it the call
    # fails with "unit inches requires res". 150 dpi keeps files small.
    grDevices::png(filename = pngName, width = 4, height = 3.5, units = "in", res = 150)
  }
  # set plot margins, restoring the user's settings on exit
  oldpar <- graphics::par(mar = c(4, 4, 0.5, 0.5))
  on.exit(graphics::par(oldpar), add = TRUE)
  graphics::plot(finalFreq / tot ~ Var1, tempDat2[inRange, ], cex = 0.7,
                 pch = 19, las = 1, ylab = ylabel, xlab = "HU", xlim = c(lowerLim, upperLim))
  # add lines and label material classes
  graphics::abline(v = c(lowerLim + 1, splits$upper))
  # BUG FIX: label heights were derived from max() over the whole filtered
  # data.frame (which includes the Var1/HU column); use the frequency column
  # only, so labels sit at 90%/70% of the plotted maximum.
  verticalTextPosition <- max(tempDat2$finalFreq[inRange], na.rm = TRUE) * c(0.90, 0.70) / tot
  graphics::text(x = (splits$upper + splits$lower) / 2, y = verticalTextPosition,
                 labels = as.character(splits$material)) # all classes

  if (!is.null(pngName)) {
    grDevices::dev.off()
  }

  if (returnData == TRUE) {
    outDat <- tempDat2[inRange, c("Var1", "finalFreq")]
    return(list(histData = outDat, splits = splits))
  }
}

R/core_426.R

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
1-
#' Ninety-five CT scans from the top of a Spartina alterniflora core
2-
#' @format A list of 95 matrices, each with two elements: header and image data
1+
#' Three computed tomography scans from a Spartina alterniflora core
2+
#' @format A list of 3 matrices, each with two elements: header and image data
33
#' @docType data
44
#' @keywords datasets
55
#' @name core_426

R/rootSizeDir.R

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -70,7 +70,7 @@ rootSizeDir <- function (directory = file.choose(),
7070
# directory <- directory # do nothing
7171
} else stop("Incorrect directory name: directory specified in a character string must end with a '/'; if 'file.choose()' is used, the selected file must be a dicom image")
7272
# load DICOMs, takes a couple minutes
73-
fname <- readDICOM(directory, verbose = TRUE)
73+
fname <- oro.dicom::readDICOM(directory, verbose = TRUE)
7474

7575
} else if (exists(directory) & (sum(names(get(directory)) %in% c("hdr", "img")) == 2)){ # could have better error checking here
7676
fname <- get(directory)
@@ -79,11 +79,11 @@ rootSizeDir <- function (directory = file.choose(),
7979
# pixelArea <- voxDims(directory)$pixelArea.mm2
8080
# thick <- voxDims(directory)$thickness.mm
8181
pixelArea <- as.numeric(strsplit(fname$hdr[[1]]$value[fname$hdr[[1]]$name %in% "PixelSpacing"], " ")[[1]][1])^2
82-
thick <- unique(extractHeader(fname$hdr, "SliceThickness"))
82+
thick <- unique(oro.dicom::extractHeader(fname$hdr, "SliceThickness"))
8383

8484
# convert raw units to Hounsfield units
85-
ct.slope <- unique(extractHeader(fname$hdr, "RescaleSlope"))
86-
ct.int <- unique(extractHeader(fname$hdr, "RescaleIntercept"))
85+
ct.slope <- unique(oro.dicom::extractHeader(fname$hdr, "RescaleSlope"))
86+
ct.int <- unique(oro.dicom::extractHeader(fname$hdr, "RescaleIntercept"))
8787
HU <- lapply(fname$img, function(x) x*ct.slope + ct.int)
8888

8989
# pass data to rootSize()

R/voxDims.R

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -26,15 +26,15 @@ voxDims <- function(directory = file.choose()) {
2626
# directory <- directory
2727
} else stop("Incorrect directory name: directory specified in a character string must end with a '/'; if 'file.choose()' is used, the selected file must be a dicom image")
2828
# load DICOMs, takes a couple minutes
29-
fname <- readDICOM(directory, verbose = TRUE)
29+
fname <- oro.dicom::readDICOM(directory, verbose = TRUE)
3030

3131
} else if (exists(directory) & (sum(names(get(directory)) %in% c("hdr", "img")) == 2)){ # could have better error checking here
3232
fname <- get(directory)
3333
} else stop("Invalid input: 'directory' object or file location is incorrectly specified.")
3434

3535
# scrape some metadata
3636
pixelArea <- as.numeric(strsplit(fname$hdr[[1]]$value[fname$hdr[[1]]$name %in% "PixelSpacing"], " ")[[1]][1])^2
37-
thick <- unique(extractHeader(fname$hdr, "SliceThickness"))
37+
thick <- unique(oro.dicom::extractHeader(fname$hdr, "SliceThickness"))
3838

3939
returnDat <- data.frame(pixelArea.mm2 = pixelArea, thickness.mm = thick)
4040
returnDat

inst/CITATION

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,8 +8,8 @@ bibentry("Manual",
88

99
textVersion =
1010
paste("Troy D. Hill and Earl Davey (2017). ",
11-
"Programmatic analysis of sediment cores using computed tomography imaging",
12-
"Narragansett, RI",
11+
"Programmatic analysis of sediment cores using computed tomography imaging. ",
12+
"Narragansett, RI. ",
1313
"URL https://github.com/troyhill/coreCT",
1414
sep=""),
1515

0 commit comments

Comments
 (0)