VGA Video output with AXI4S Video Out


Kampi

Question

Hello,

I am trying to generate a VGA signal for my Zybo with a VDMA, a Video Timing Controller, and an AXI4-Stream to Video Out IP. So I created the block design below with the settings shown in the attached screenshots. (Note: I tested the design with the Test Pattern Generator instead of the VDMA first, so I know that the settings of the Video Timing Controller and the Video Out IP are correct.)

My code looks like this:

#ifdef WITH_TESTPATTERN
	#include "xv_tpg.h"
#endif

#include "xaxivdma.h"
#include "xparameters.h"

#ifdef WITH_TESTPATTERN
	XV_tpg TPG;
	XV_tpg_Config* TPG_Config;
#endif

XAxiVdma_Config* VDMA_Config;
XAxiVdma VDMA;
XAxiVdma_DmaSetup ReadConfiguration;

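/* 800 x 600 frame buffer with one unsigned int per colour channel */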
unsigned int frame_buffer[800][600][3];
unsigned int srcBuffer;

u32 Status;

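/* Fill the whole frame buffer with white (0xFF in every colour channel) */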
void fill(void)
{
	for(u32 i = 0x00; i < 800; i++)
	{
		for(u32 j = 0x00; j < 600; j++)
		{
			frame_buffer[i][j][0] = 0xFF;
			frame_buffer[i][j][1] = 0xFF;
			frame_buffer[i][j][2] = 0xFF;
		}
	}
}

int main()
{
	#ifdef WITH_TESTPATTERN
		TPG_Config = XV_tpg_LookupConfig(XPAR_TESTPATTERN_DEVICE_ID);
		if(!TPG_Config)
		{
			xil_printf("Error during test pattern generator configuration!\n\r");
			return -1;
		}

		Status = XV_tpg_CfgInitialize(&TPG, TPG_Config, TPG_Config->BaseAddress);
		if(Status != XST_SUCCESS)
		{
			xil_printf("Error during test pattern generator initialization!\n\r");
			return -1;
		}

		XV_tpg_Set_height(&TPG, 600);
		XV_tpg_Set_width(&TPG, 800);
		XV_tpg_Set_bckgndId(&TPG, 0x0C);
		XV_tpg_EnableAutoRestart(&TPG);

		XV_tpg_Start(&TPG);
	#endif

	VDMA_Config = XAxiVdma_LookupConfig(XPAR_VIDEODMA_DEVICE_ID);
	if(!VDMA_Config)
	{
		xil_printf("Error during VDMA configuration!\n\r");
		return -1;
	}

	Status = XAxiVdma_CfgInitialize(&VDMA, VDMA_Config, VDMA_Config->BaseAddress);
	if(Status != XST_SUCCESS)
	{
		xil_printf("Error during VDMA initialization!\n\r");
		return -1;
	}

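	/* HoriSizeInput and Stride are in bytes: pixels per line times the stream width in bytes (Mm2SStreamWidth / 8) */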
	ReadConfiguration.VertSizeInput       = 600;
	ReadConfiguration.HoriSizeInput       = 800 * (VDMA_Config->Mm2SStreamWidth >> 3);
	ReadConfiguration.Stride              = 800 * (VDMA_Config->Mm2SStreamWidth >> 3);
	ReadConfiguration.FrameDelay          = 0;
	ReadConfiguration.EnableCircularBuf   = 1;
	ReadConfiguration.EnableSync          = 0;
	ReadConfiguration.PointNum            = 0;
	ReadConfiguration.EnableFrameCounter  = 0;
	ReadConfiguration.FixedFrameStoreAddr = 0;

	Status = XAxiVdma_DmaConfig(&VDMA, XAXIVDMA_READ, &ReadConfiguration);
	if(Status != XST_SUCCESS)
	{
		xil_printf("Read channel configuration failed!\n\r");
		return -1;
	}

	fill();

	Status = XAxiVdma_DmaSetBufferAddr(&VDMA, XAXIVDMA_READ, (UINTPTR*)frame_buffer);
	if(Status != XST_SUCCESS)
	{
		xil_printf("Read channel set buffer address failed!\n\r");
		return -1;
	}

	Status = XAxiVdma_DmaStart(&VDMA, XAXIVDMA_READ);
	if(Status != XST_SUCCESS)
	{
		xil_printf("Failed to start DMA engine (read channel)!\n\r");
		return -1;
	}

	xil_printf("Start...\n\r");

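	/* Keep the application running so the VDMA keeps reading the frame buffer */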
	while(1)
	{

	}

	return 0;
}

But the monitor doesn't show a picture (and there is no "no signal" message either, so HSync and VSync seem to work), and I get the following terminal message:

Read channel set buffer address failed!
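
For comparison, this is how I understand XAxiVdma_DmaSetBufferAddr() from the xaxivdma driver header: it seems to expect an array with one address per frame store, not a pointer to the pixel data itself. Here is a minimal sketch of what I mean (untested; the XAXIVDMA_MAX_FRAMESTORE array size and the loop over MaxFrameStoreNum are my assumptions from reading the header):

UINTPTR BufferAddresses[XAXIVDMA_MAX_FRAMESTORE];

for(u32 i = 0; i < VDMA_Config->MaxFrameStoreNum; i++)
{
	/* Point every frame store at the same (single) frame buffer */
	BufferAddresses[i] = (UINTPTR)frame_buffer;
}

Status = XAxiVdma_DmaSetBufferAddr(&VDMA, XAXIVDMA_READ, BufferAddresses);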

So what is wrong with the code?

Thank you guys :)

Attachments: Blockdesign.png, VideoDMA.png, VideoTiming.png, VideOut.png
