I have been trying to create a script that will fetch an image file, store it in a 2d grayscale non-alpha byte array based on the x and y axis so I can perform math operations then save that array back to a new image file. I have come close to doing so but something is not doing its job in the process. The resulting image has a lot of dark horizontal static and I upon the many pages I have searched and searched. Still no answer. Could somebody please explain what I am doing wrong here? Below is the basics of what I am doing and between the two big comment boxes is where I will be modifing "byte data[x][y]".
package org.test.proc;
import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.awt.image.DataBufferByte;
import javax.imageio.ImageIO;
public class Main {
public static void main(String[] args) {
System.out.println(args[0]);
BufferedImage img = null;
try {
img = ImageIO.read(new File(args[0]));
} catch (IOException e) {
e.printStackTrace();
}
int w=img.getWidth();
int h=img.getHeight();
byte[][] data_a = Main.getByteArray(img);
//////////////////////////////////
/// ALGORITHM PHASE START ///
//////////////////////////////////
/////////////////////////////////
/// ALGORITHM PHASE END ///
/////////////////////////////////
byte[] data_l = new byte[w*h*3];
int i=0;
for (int x=0;x<w;x++) {
for (int y=0;y<h;y++) {
data_l[i]=data_a[x][y];
i++;
data_l[i]=data_a[x][y];
i++;
data_l[i]=data_a[x][y];
i++;
//data_l[i]=127;
//i++;
}
}
try {
//BufferedImage image=null;
//image = ImageIO.read(new ByteArrayInputStream(data_l));
//System.out.println(image.toString());
//ImageIO.write(image, "JPG", new File("test.jpg"));
BufferedImage image = new BufferedImage(w, h, BufferedImage.TYPE_3BYTE_BGR);
image.getWritableTile(0, 0).setDataElements(0, 0, w, h, data_l);
ImageIO.write(image, "PNG", new File("test.png"));
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
private static byte[][] getByteArray(BufferedImage image) {
//http://stackoverflow.com/questions/6524196/java-get-pixel-array-from-image
final byte[] pixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
final int width = image.getWidth();
final int height = image.getHeight();
final boolean hasAlphaChannel = (image.getType() != BufferedImage.TYPE_3BYTE_BGR);
byte[][] result = new byte[width][height];
if (hasAlphaChannel) {
for (int pixel = 0, row = 0, col = 0; pixel < pixels.length; pixel += 4) {
int argb = pixels[pixel+1]; // blue
argb += pixels[pixel + 2]; // green
argb += pixels[pixel + 3]; // red
argb/=3;
result[col][row] = (byte)argb;
col++;
if (col == width) {
col = 0;
row++;
}
}
} else {
for (int pixel = 0, row = 0, col = 0; pixel < pixels.length; pixel += 3) {
int argb = pixels[pixel]; // blue
argb += pixels[pixel+1]; // green
argb += pixels[pixel+2]; // red
argb/=3;
result[col][row] = (byte)argb;
col++;
if (col == width) {
col = 0;
row++;
}
}
}
return result;
}
}
Could somebody please take a look at this and tell me why the resulting image is just black static and not the same as the input image.
Thank you
Hoping for the best
cwarn23