public EncodedMB() {
    // Scratch macroblock: 16x16 pixels in full-range (JPEG) YUV 4:2:0.
    pixels = Picture.create(16, 16, ColorSpace.YUV420J);
    // Per-4x4-subblock metadata: coefficient counts and motion vector components.
    mx = new int[16];
    my = new int[16];
    nc = new int[16];
}
private Picture splitSlice(Picture result, int mbX, int mbY, int sliceMbCount, boolean unsafe, int vStep, int vOffset) {
    // Destination slice: sliceMbCount macroblocks side by side, 16 pixels tall,
    // preserving the source's high-bit-depth low bits.
    Picture slice = Picture.createCroppedHiBD(sliceMbCount << 4, 16, result.getLowBitsNum(), YUV422, null);
    if (!unsafe) {
        // Fast path: the requested region lies fully inside the frame.
        split(result, slice, mbX, mbY, sliceMbCount, vStep, vOffset);
        return slice;
    }
    // Edge case: the region extends past the frame border. Copy it into an
    // edge-padded intermediate buffer first, then split from offset (0, 0).
    int mbHeightPix = 16 << vStep;
    Picture padded = Picture.create(sliceMbCount << 4, mbHeightPix, YUV422);
    ImageOP.subImageWithFillPic8(result, padded,
            new Rect(mbX << 4, mbY << (4 + vStep), sliceMbCount << 4, mbHeightPix));
    split(padded, slice, 0, 0, sliceMbCount, vStep, vOffset);
    return slice;
}
final Picture rgb = Picture.create(pixelWidth, pixelHeight, ColorSpace.RGB); final BufferedImage bufferedImage = new BufferedImage(pixelWidth, pixelHeight, BufferedImage.TYPE_3BYTE_BGR); final AvcCBox avcC = AvcCBox.parseAvcCBox(ByteBuffer.wrap(codecPrivateData)); decoder.addSps(avcC.getSpsList()); decoder.addPps(avcC.getPpsList()); final Picture buf = Picture.create(pixelWidth + ((16 - (pixelWidth % 16)) % 16), pixelHeight + ((16 - (pixelHeight % 16)) % 16), ColorSpace.YUV420J); final List<ByteBuffer> byteBuffers = splitMOVPacket(frameBuffer, avcC); final Picture pic = decoder.decodeFrameFromNals(byteBuffers, buf.getData());
/**
 * Decodes every frame of an H.264 elementary stream and compares the decoded
 * pixels plane-by-plane against a raw reference file.
 *
 * @param coded H.264 Annex-B elementary stream file to decode
 * @param ref   raw planar reference file read sequentially, one frame's worth
 *              of luma + two chroma planes per decoded frame
 * @return true if every plane of every frame matches the reference, false on
 *         the first mismatch
 * @throws IOException if either file cannot be read
 */
private boolean test(File coded, File ref) throws IOException {
    BufferH264ES es = new BufferH264ES(NIOUtils.fetchFromFile(coded));
    // Decode buffer sized for coded full-HD (1088 = 1080 rounded up to a macroblock multiple).
    Picture buf = Picture.create(1920, 1088, ColorSpace.YUV420);
    H264Decoder dec = new H264Decoder();
    Packet nextFrame;
    ByteBuffer _yuv = NIOUtils.fetchFromFile(ref);
    while ((nextFrame = es.nextFrame()) != null) {
        Picture out = dec.decodeFrame(nextFrame.getData(), buf.getData()).cropped();
        // Copy into a compatible picture so comparisons see tightly-packed plane data.
        Picture pic = out.createCompatible();
        pic.copyFrom(out);
        int lumaSize = pic.getWidth() * pic.getHeight();
        // 4:2:0 chroma: each chroma plane is a quarter of the luma plane.
        int crSize = lumaSize >> 2;
        int cbSize = lumaSize >> 2;
        // Consume exactly one frame's worth of reference bytes.
        ByteBuffer yuv = NIOUtils.read(_yuv, lumaSize + crSize + cbSize);
        // Reference samples are unsigned; toByteArrayShifted re-centers them to
        // match the decoder's signed plane representation.
        if (!Platform.arrayEqualsByte(ArrayUtil.toByteArrayShifted(JCodecUtil2.getAsIntArray(yuv, lumaSize)), pic.getPlaneData(0)))
            return false;
        if (!Platform.arrayEqualsByte(ArrayUtil.toByteArrayShifted(JCodecUtil2.getAsIntArray(yuv, crSize)), pic.getPlaneData(1)))
            return false;
        if (!Platform.arrayEqualsByte(ArrayUtil.toByteArrayShifted(JCodecUtil2.getAsIntArray(yuv, cbSize)), pic.getPlaneData(2)))
            return false;
    }
    return true;
}
}
public Picture createCompatible() {
    // Fresh, same-geometry picture: identical dimensions and color space,
    // with newly allocated (blank) plane data.
    final int w = width;
    final int h = height;
    return Picture.create(w, h, color);
}
final Picture rgb = Picture.create(pixelWidth, pixelHeight, ColorSpace.RGB); final BufferedImage renderImage = new BufferedImage(pixelWidth, pixelHeight, BufferedImage.TYPE_3BYTE_BGR); final AvcCBox avcC = AvcCBox.parseAvcCBox(ByteBuffer.wrap(codecPrivateData)); decoder.addSps(avcC.getSpsList()); decoder.addPps(avcC.getPpsList()); final Picture buf = Picture.create(pixelWidth + ((16 - (pixelWidth % 16)) % 16), pixelHeight + ((16 - (pixelHeight % 16)) % 16), ColorSpace.YUV420J); final List<ByteBuffer> byteBuffers = splitMOVPacket(frameBuffer, avcC); final Picture pic = decoder.decodeFrameFromNals(byteBuffers, buf.getData());
@Override
public byte[][] allocatePicture() {
    // Plane buffers for a coded full-HD frame (1088 rows = 1080 rounded up to
    // a macroblock multiple); 4:4:4 keeps every plane at full resolution.
    Picture scratch = Picture.create(1920, 1088, ColorSpace.YUV444);
    return scratch.getData();
}
}
public MBlockDecoderBase(SliceHeader sh, DeblockerInput di, int poc, DecoderState decoderState) { this.interpolator = new BlockInterpolator(); this.s = decoderState; this.sh = sh; this.di = di; this.poc = poc; this.mbb = new Picture[] { Picture.create(16, 16, s.chromaFormat), Picture.create(16, 16, s.chromaFormat) }; scalingMatrix = initScalingMatrix(sh); }
public static Picture fromBufferedImageRGB(BufferedImage src) {
    // Allocate an RGB picture matching the image dimensions, then copy the
    // pixel data across.
    final int w = src.getWidth();
    final int h = src.getHeight();
    Picture dst = Picture.create(w, h, RGB);
    fromBufferedImage(src, dst);
    return dst;
}
@Override
public byte[][] allocatePicture() {
    // Plane buffers sized from the configured dimensions; 4:4:4 keeps every
    // plane at full resolution.
    Picture pic = Picture.create(size.getWidth(), size.getHeight(), ColorSpace.YUV444);
    return pic.getData();
}
/**
 * Hands out a picture from the pool, preferring a cached buffer with the
 * exact width, height and color space; allocates a new one on a pool miss.
 * The returned picture is removed from the pool and wrapped with an initial
 * reference count of 1.
 *
 * @param width  required picture width in pixels
 * @param height required picture height in pixels
 * @param color  required color space
 * @return a loaner wrapping either a recycled or a freshly allocated picture
 */
@Override
public LoanerPicture getPicture(int width, int height, ColorSpace color) {
    // Indexed scan with remove-by-index: the original used List.remove(Object)
    // inside a for-each, which rescans the list via equals() and only avoids
    // ConcurrentModificationException because it returns immediately.
    for (int i = 0; i < buffers.size(); i++) {
        Picture picture = buffers.get(i);
        if (picture.getWidth() == width && picture.getHeight() == height && picture.getColor() == color) {
            buffers.remove(i);
            return new LoanerPicture(picture, 1);
        }
    }
    // Pool miss: allocate a fresh picture.
    return new LoanerPicture(Picture.create(width, height, color), 1);
}
/**
 * Converts one source picture to the encoder's color space, encodes it as an
 * H.264 frame and appends it to the output video track as an MP4 packet.
 * Mutates sequence state: advances frameNo, reuses toEncode and _out across
 * calls. NOTE(review): every packet is flagged as a sync frame (true) —
 * presumably the encoder is configured for all-intra output; confirm.
 *
 * @param pic source picture; its dimensions fix the size of the lazily
 *            allocated conversion buffer on the first call
 * @throws IOException if writing the packet to the output track fails
 */
public void encodeNativeFrame(Picture pic) throws IOException {
    // Lazily allocate the conversion target in the encoder's preferred color space.
    if (toEncode == null) {
        toEncode = Picture.create(pic.getWidth() , pic.getHeight() , encoder.getSupportedColorSpaces()[0]);
    }
    // Perform conversion
    transform.transform(pic, toEncode);
    // Encode image into H.264 frame, the result is stored in '_out' buffer
    _out.clear();
    ByteBuffer result = encoder.encodeFrame(toEncode, _out);
    // Based on the frame above form correct MP4 packet
    spsList.clear();
    ppsList.clear();
    // Strip in-band SPS/PPS (they belong in the avcC box, not the samples)
    // and convert Annex-B start codes to MP4 length prefixes.
    H264Utils.wipePS(result, spsList, ppsList);
    H264Utils.encodeMOVPacket(result);
    // Add packet to video track; pts == dts == frameNo, duration 1 timescale unit.
    outTrack.addFrame(new MP4Packet(result, frameNo, timeScale, 1, frameNo, true, null, frameNo, 0));
    frameNo++;
}
public static Picture fromBufferedImage(BufferedImage src, ColorSpace tgtColor) {
    // Two-step import: read the image as RGB first, then color-convert the
    // RGB picture into the requested target space.
    Picture rgb = fromBufferedImageRGB(src);
    Picture converted = Picture.create(rgb.getWidth(), rgb.getHeight(), tgtColor);
    Transform tr = ColorUtil.getTransform(rgb.getColor(), tgtColor);
    tr.transform(rgb, converted);
    return converted;
}
public static void savePictureAsPPM(Picture pic, File file) throws IOException {
    // PPM stores RGB, so convert first, then encode and write the result out.
    Picture rgb = Picture.create(pic.getWidth(), pic.getHeight(), ColorSpace.RGB);
    Transform toRgb = ColorUtil.getTransform(pic.getColor(), ColorSpace.RGB);
    toRgb.transform(pic, rgb);
    NIOUtils.writeTo(new PPMEncoder().encodeFrame(rgb), file);
}
.create((size.getWidth() + 15) & ~0xf, (size.getHeight() + 15) & ~0xf, ColorSpace.YUV420) .getData())); if (i % 500 == 0)
picOut = Picture.create(mbWidth << 4, mbHeight << 4, ColorSpace.YUV420J);
private void decodeMacroblocks(Frame[][] refList) {
    // Reusable scratch buffers: decoded pixels and parsed macroblock data.
    Picture mb = Picture.create(16, 16, activeSps.chromaFormatIdc);
    MBlock mBlock = new MBlock(activeSps.chromaFormatIdc);
    int mbWidth = activeSps.picWidthInMbsMinus1 + 1;
    // Drain macroblocks from the parser until the slice is exhausted.
    while (parser.readMacroblock(mBlock)) {
        decode(mBlock, parser.getSliceHeader().sliceType, mb, refList);
        // Map the macroblock index to its raster position in the frame.
        int mbAddr = mapper.getAddress(mBlock.mbIdx);
        int mbX = mbAddr % mbWidth;
        int mbY = mbAddr / mbWidth;
        putMacroblock(frameOut, mb, mbX, mbY);
        // Record per-macroblock deblocker inputs.
        di.shs[mbAddr] = parser.getSliceHeader();
        di.refsUsed[mbAddr] = refList;
        fillCoeff(mBlock, mbX, mbY);
        // Reset scratch state before the next iteration.
        mb.fill(0);
        mBlock.clear();
    }
}
Picture outMB = Picture.create(16, 16, ColorSpace.YUV420);
/**
 * Returns a copy of this picture reduced to its crop rectangle, or this very
 * instance when no cropping is needed.
 *
 * For planar color spaces each plane is cropped independently, with the crop
 * origin and size scaled down by that plane's subsampling shifts
 * (compWidth / compHeight). For interleaved (non-planar) data plane 0 holds
 * all components, so offsets and strides are multiplied by nComp instead.
 *
 * @return a new cropped Picture, or {@code this} if {@link #cropNeeded()} is false
 */
public Picture cropped() {
    if (!cropNeeded())
        return this;
    Picture result = Picture.create(crop.getWidth(), crop.getHeight(), color);
    if(color.planar) {
        for (int plane = 0; plane < data.length; plane++) {
            // Some planes may be absent (e.g. unused chroma slots).
            if (data[plane] == null)
                continue;
            // Args: src, srcX, srcY, w, h, srcStride, dstStride, dst — all in
            // plane-local units after the subsampling shifts.
            // NOTE(review): srcStride is derived from the full picture width;
            // dstStride equals the cropped plane width — assumes the source
            // plane is packed at 'width >> compWidth[plane]' bytes per row.
            cropSub(data[plane], crop.getX() >> color.compWidth[plane], crop.getY() >> color.compHeight[plane],
                    crop.getWidth() >> color.compWidth[plane], crop.getHeight() >> color.compHeight[plane],
                    width >> color.compWidth[plane], crop.getWidth() >> color.compWidth[plane],
                    result.data[plane]);
        }
    } else {
        // Interleaved: strides and widths scale by the number of components per pixel.
        cropSub(data[0], crop.getX(), crop.getY(), crop.getWidth(), crop.getHeight(), width * color.nComp,
                crop.getWidth() * color.nComp, result.data[0]);
    }
    return result;
}
CAVLCWriter.writeSE(out, mv[1] - mvpy); // mvdy Picture mbRef = Picture.create(16, 16, sps.chromaFormatIdc); int[][] mb = new int[][] { new int[256], new int[64], new int[64] };